From 8ee2efdb4201b551c86ff693c9f89542e3d06e0b Mon Sep 17 00:00:00 2001 From: zhiheng huang Date: Tue, 1 Oct 2024 21:53:31 -0700 Subject: [PATCH] Add Anthropic contextual retrieval experiments --- denser_retriever/embeddings.py | 37 + denser_retriever/keyword.py | 12 +- denser_retriever/reranker.py | 64 +- denser_retriever/utils.py | 18 +- .../data/contextual-embeddings/README.md | 247 + .../data/contextual-embeddings/create_data.py | 61 + .../data_base/passages.jsonl | 737 +++ .../data_base/qrels.jsonl | 248 + .../data_base/queries.jsonl | 248 + .../data_context/passages.jsonl | 737 +++ .../data_context/qrels.jsonl | 248 + .../data_context/queries.jsonl | 248 + .../original_data/codebase_chunks.json | 4317 +++++++++++++++++ .../original_data/evaluation_set.jsonl | 248 + experiments/denser_data.py | 18 + experiments/train_and_test.py | 80 +- experiments/utils.py | 10 + 17 files changed, 7538 insertions(+), 40 deletions(-) create mode 100644 experiments/data/contextual-embeddings/README.md create mode 100644 experiments/data/contextual-embeddings/create_data.py create mode 100644 experiments/data/contextual-embeddings/data_base/passages.jsonl create mode 100644 experiments/data/contextual-embeddings/data_base/qrels.jsonl create mode 100644 experiments/data/contextual-embeddings/data_base/queries.jsonl create mode 100644 experiments/data/contextual-embeddings/data_context/passages.jsonl create mode 100644 experiments/data/contextual-embeddings/data_context/qrels.jsonl create mode 100644 experiments/data/contextual-embeddings/data_context/queries.jsonl create mode 100644 experiments/data/contextual-embeddings/original_data/codebase_chunks.json create mode 100644 experiments/data/contextual-embeddings/original_data/evaluation_set.jsonl create mode 100644 experiments/denser_data.py diff --git a/denser_retriever/embeddings.py b/denser_retriever/embeddings.py index 411928d..abc8b15 100644 --- a/denser_retriever/embeddings.py +++ b/denser_retriever/embeddings.py @@ -38,3 +38,40 @@ def embed_query(self, text): else: embeddings = self.client.encode([text], prompt_name="query") return embeddings + + +class VoyageAPIEmbeddings(DenserEmbeddings): + def __init__(self, api_key: str, model_name: str, embedding_size: int): + try: + import voyageai + except ImportError as exc: + raise ImportError( + "Could not import voyage python package. " + "Please install it with `pip install voyageai`." + ) from exc + + self.client = voyageai.Client(api_key) + self.model_name = model_name + self.embedding_size = embedding_size + + def embed_documents(self, texts): + """ + Embeds multiple documents using the Voyage API. + Args: + texts: A list of document texts. + Returns: + A list of document embeddings. + """ + embeddings = self.client.embed(texts, model=self.model_name).embeddings + return embeddings + + def embed_query(self, text): + """ + Embeds a single query using the Voyage API. + Args: + text: The query text. + Returns: + The query embedding. 
+ """ + embeddings = self.client.embed([text], model=self.model_name).embeddings + return embeddings diff --git a/denser_retriever/keyword.py b/denser_retriever/keyword.py index 5e2d1da..b6cea41 100644 --- a/denser_retriever/keyword.py +++ b/denser_retriever/keyword.py @@ -244,9 +244,13 @@ def add_documents( "pid": metadata.get("pid"), } for filter in self.search_fields.get_keys(): - v = metadata.get(filter, "").strip() - if v: - request[filter] = v + value = metadata.get(filter, "") + if isinstance(value, list): + value = [v.strip() for v in value] + elif value is not None: + value = value.strip() + if value: + request[filter] = value requests.append(request) if len(requests) > 0: @@ -342,7 +346,7 @@ def retrieve( }, ) score = res["hits"]["hits"][id]["_score"] - for field in filter: + for field in self.search_fields.get_keys(): if _source.get(field): doc.metadata[field] = _source.get(field) docs.append((doc, score)) diff --git a/denser_retriever/reranker.py b/denser_retriever/reranker.py index e1ac5ad..aaae846 100644 --- a/denser_retriever/reranker.py +++ b/denser_retriever/reranker.py @@ -3,7 +3,8 @@ from typing import List, Sequence, Tuple import time import logging -from langchain_community.cross_encoders import HuggingFaceCrossEncoder +import cohere +from sentence_transformers import CrossEncoder from langchain_core.documents import Document logger = logging.getLogger(__name__) @@ -25,9 +26,9 @@ def rerank( class HFReranker(DenserReranker): """Rerank documents using a HuggingFaceCrossEncoder model.""" - def __init__(self, model_name: str, model_kwargs: dict = {}, **kwargs): - super().__init__() - self.model = HuggingFaceCrossEncoder(model_name=model_name, model_kwargs = model_kwargs) + def __init__(self, model_name: str, top_k: int, **kwargs): + super().__init__(top_k=top_k) + self.model = CrossEncoder(model_name, **kwargs) def rerank( self, @@ -47,7 +48,7 @@ def rerank( if not documents: return [] start_time = time.time() - scores = self.model.score([(query, doc.page_content) for doc in documents]) + scores = self.model.predict([(query, doc.page_content) for doc in documents], convert_to_tensor=True) docs_with_scores = list(zip(documents, scores)) result = sorted(docs_with_scores, key=operator.itemgetter(1), reverse=True) rerank_time_sec = time.time() - start_time @@ -55,3 +56,56 @@ def rerank( logger.info(f"Reranked {len(result)} documents.") return result + +class CohereReranker(DenserReranker): + """Rerank documents using the Cohere API.""" + + def __init__(self, api_key: str, model_name: str = "rerank-english-v3.0", **kwargs): + """ + Initialize Cohere reranker. + + Args: + api_key: The API key for Cohere. + model_name: The name of the Cohere model to use for reranking. + """ + super().__init__() + self.client = cohere.Client(api_key) + self.model_name = model_name + + def rerank( + self, + documents: Sequence[Document], + query: str, + ) -> List[Tuple[Document, float]]: + """ + Rerank documents using Cohere's reranking model. + + Args: + documents: A sequence of documents to rerank. + query: The query to use for ranking the documents. + + Returns: + A list of tuples containing the document and its score. 
+ """ + if not documents: + return [] + + start_time = time.time() + + # Prepare documents for reranking + texts = [doc.page_content for doc in documents] + response = self.client.rerank( + model=self.model_name, + query=query, + documents=texts + ) + # Combine documents with scores from the rerank response + docs_with_scores = [(documents[result.index], result.relevance_score) for result in response.results] + + # Sort the documents by their scores in descending order + result = sorted(docs_with_scores, key=operator.itemgetter(1), reverse=True) + + rerank_time_sec = time.time() - start_time + logger.info(f"Cohere Rerank time: {rerank_time_sec:.3f} sec.") + logger.info(f"Reranked {len(result)} documents.") + return result diff --git a/denser_retriever/utils.py b/denser_retriever/utils.py index d787c9c..bae90fb 100644 --- a/denser_retriever/utils.py +++ b/denser_retriever/utils.py @@ -12,7 +12,7 @@ def evaluate( qrels: Dict[str, Dict[str, int]], results: Dict[str, Dict[str, float]], metric_file: Optional[str] = None, - k_values: List[int] = [1, 3, 5, 10, 100, 1000], + k_values: List[int] = [1, 3, 5, 10, 20, 100, 1000], ignore_identical_ids: bool = True, ) -> Tuple[Dict[str, float], Dict[str, float], Dict[str, float], Dict[str, float]]: if ignore_identical_ids: @@ -76,6 +76,11 @@ def save_queries(queries, output_file: str): json.dump(data, out, ensure_ascii=False) out.write("\n") +def load_queries(in_file: str): + res = [] + for line in open(in_file, "r"): + res.append(json.loads(line)) + return res def save_qrels(qrels, output_file: str): out = open(output_file, "w") @@ -84,6 +89,17 @@ def save_qrels(qrels, output_file: str): json.dump(data, out, ensure_ascii=False) out.write("\n") +def save_qrels_from_trec(trec_file, qrels_file): + qrels = {} + with open(trec_file, "r") as f: + for line in f: + qid, _, pid, rel = line.split() + if qid not in qrels: + qrels[qid] = {} + qrels[qid][pid] = int(rel) + + save_qrels(qrels, qrels_file) + return qrels def load_qrels(in_file: str): res = {} diff --git a/experiments/data/contextual-embeddings/README.md b/experiments/data/contextual-embeddings/README.md new file mode 100644 index 0000000..b0d9549 --- /dev/null +++ b/experiments/data/contextual-embeddings/README.md @@ -0,0 +1,247 @@ +# Anthropic Contextual Retrieval Dataset + +In the recent blog post [Introducing Contextual Retrieval](https://www.anthropic.com/news/contextual-retrieval), +Anthropic proposed a method called “Contextual Retrieval” that improves the retrieval step in RAG, leading +to significant improvements in retrieval accuracy and better performance in downstream tasks. +In this experiment, we benchmark the Anthropic contextual retrieval dataset in Denser Retriever. Our key +findings are as follows: + +1. The Anthropic cookbook demonstrates a prototype of Retrieval-Augmented Generation (RAG) but lacks scalability for + large systems. For instance, it loads all document embeddings into memory, which becomes impractical for a large + retrieval corpus. We first reproduce the experiments using + the [Denser Retriever](https://github.com/denser-org/denser-retriever) codebase. With built-in support for + Elasticsearch and vector search, our implementation is ready for deployment in large-scale industrial + applications. + +2. Denser Retriever offers various configuration options for building retrievers. Users can choose between paid API + services and open-source (free) models to balance accuracy and cost.
In our experiments on the Anthropic contextual + retrieval dataset, we demonstrate that comparable accuracy can be achieved by substituting paid model APIs with + open-source models. This flexibility is crucial for production deployments where managing costs is a priority. + +## Dataset + +The dataset referenced in the blog +post [Introducing Contextual Retrieval](https://www.anthropic.com/news/contextual-retrieval) and the +Anthropic [Cookbook](https://github.com/anthropics/anthropic-cookbook/tree/main/skills/contextual-embeddings) is located +in the `original_data` directory. It contains a total of 248 queries and 737 documents. + +To use the dataset in Denser Retriever experiments, we run the following command to generate two +datasets, `data_base` and `data_context`. + +```bash +python experiments/data/contextual-embeddings/create_data.py +``` + +The `data_base` dataset is the original dataset, and the `data_context` +dataset is augmented with contextual text as proposed in the Anthropic blog post. Each dataset consists of a query +file `queries.jsonl`, +a document file `passages.jsonl`, and a relevance file `qrels.jsonl`. The difference between the two datasets +is that the `data_context` document file `passages.jsonl` contains the augmented document contexts from the Anthropic +API (see the blog post for more details). We include this file so that +users can use it directly without calling the Anthropic API. + +## Baseline Experiment + +We run the Denser Retriever experiments on the `data_base` dataset with the following command. + +```bash +python experiments/train_and_test.py anthropic_base test test +``` + +`anthropic_base` is the experiment dataset name, and the two `test` arguments are the training and test splits, +respectively; they are identical in our case. Under the hood, we apply 3-fold cross-validation to the whole dataset +(248 queries). Three train-and-test runs are conducted; in each run, 2/3 of the queries are used to train Denser Retriever and the +remaining 1/3 are used to test. The final test accuracy is averaged over these three runs. Following the +Anthropic blog setting, we use the Voyage `voyage-2` API model for vector embedding and the Cohere `rerank-english-v3.0` API +model for re-ranking. Interested users can refer to the `experiments/train_and_test.py` script for more details. + +The following shows the results of the Denser Retriever on the `data_base` dataset. `keyword` is the BM25 +method, `vector` is the Voyage-2 vector search, and `reranker` is the Cohere `rerank-english-v3.0` reranking accuracy based +on the `keyword` and `vector` search results. All remaining methods are combinations of keyword search, vector search, and +reranker, as proposed and implemented in Denser Retriever. For example, `es+vs` is the combination +of keyword search and vector search, and `es+vs_n` is a variant of `es+vs` that additionally normalizes the keyword-search +and vector-search scores before combining them (see the sketch below). While the Anthropic blog reported only Recall@20, we +additionally report NDCG@20, as the latter accounts for the position of the ground truth when evaluating the search results.
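+For intuition, here is what min-max score normalization looks like when combining keyword (BM25) and vector
+scores, whose raw ranges differ. This is an illustrative sketch only: the actual combination in Denser Retriever
+is learned from the training split (see `experiments/train_and_test.py`), and the helper below is a hypothetical
+name, not part of the codebase.
+
+```python
+def min_max_normalize(scores):
+    # Rescale raw retriever scores to [0, 1] so that BM25 scores (unbounded)
+    # and vector similarities (roughly [0, 1]) become comparable.
+    lo, hi = min(scores), max(scores)
+    return [0.0] * len(scores) if hi == lo else [(s - lo) / (hi - lo) for s in scores]
+
+bm25_scores = [12.3, 9.1, 4.2]      # example raw BM25 scores
+vector_scores = [0.83, 0.80, 0.52]  # example raw similarity scores
+print(min_max_normalize(bm25_scores))   # [1.0, 0.604..., 0.0]
+print(min_max_normalize(vector_scores))
+```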
+ +``` +== NDCG@20 +metric_keyword.json: "NDCG@20": 0.47541, +metric_vector.json: "NDCG@20": 0.73526, +metric_reranker.json: "NDCG@20": 0.81858, +metric_es+vs.json: "NDCG@20": 0.74581, +metric_es+rr.json: "NDCG@20": 0.81903, +metric_vs+rr.json: "NDCG@20": 0.81635, +metric_es+vs+rr.json: "NDCG@20": 0.82218, +metric_es+vs_n.json: "NDCG@20": 0.75084, +metric_es+rr_n.json: "NDCG@20": 0.80381, +metric_vs+rr_n.json: "NDCG@20": 0.81515, +metric_es+vs+rr_n.json: "NDCG@20": 0.81169, + +== Recall@20 +metric_keyword.json: "Recall@20": 0.70488, +metric_vector.json: "Recall@20": 0.90063, +metric_reranker.json: "Recall@20": 0.94158, +metric_es+vs.json: "Recall@20": 0.90711, +metric_es+rr.json: "Recall@20": 0.94081, +metric_vs+rr.json: "Recall@20": 0.93568, +metric_es+vs+rr.json: "Recall@20": 0.94249, +metric_es+vs_n.json: "Recall@20": 0.90903, +metric_es+rr_n.json: "Recall@20": 0.92276, +metric_vs+rr_n.json: "Recall@20": 0.94105, +metric_es+vs+rr_n.json: "Recall@20": 0.93232, +``` + +The baseline experiment result is consistent with the Anthropic cookbook run. Specifically, the vector-search +Recall@20 of 0.90063 confirms the Anthropic cookbook result of 0.9006. Keyword search and vector search achieve +Recall@20 of 0.70488 and 0.90063, respectively, and the reranker further improves the Recall@20 to 0.94158. The same trend is +observed in the NDCG@20 metric. We note that the method `es+vs+rr_n` does not offer better accuracy than +`reranker`, partially due to the strong performance of the reranker. This differs from +the [MTEB benchmarks](https://retriever-docs.denser.ai/docs/core/experiments/mteb_retrieval), where `es+vs+rr_n` offers the +best accuracy. + +## Contextual Experiment + +We now run the Denser Retriever experiments on the `data_context` dataset with the following command. The difference +between the `data_base` and `data_context` datasets is that the `data_context` dataset contains the augmented +document contexts from the Anthropic API. + +```bash +python experiments/train_and_test.py anthropic_context test test +``` + +Upon running the above command, the Denser Retriever is trained and tested on the `data_context` dataset. The results +are shown below. + +``` +== NDCG@20 +metric_keyword.json: "NDCG@20": 0.7041, +metric_vector.json: "NDCG@20": 0.75732, +metric_reranker.json: "NDCG@20": 0.8393, +metric_es+vs.json: "NDCG@20": 0.76807, +metric_es+rr.json: "NDCG@20": 0.83337, +metric_vs+rr.json: "NDCG@20": 0.8298, +metric_es+vs+rr.json: "NDCG@20": 0.83267, +metric_es+vs_n.json: "NDCG@20": 0.76734, +metric_es+rr_n.json: "NDCG@20": 0.83657, +metric_vs+rr_n.json: "NDCG@20": 0.82849, +metric_es+vs+rr_n.json: "NDCG@20": 0.83085, + +== Recall@20 +metric_keyword.json: "Recall@20": 0.89267, +metric_vector.json: "Recall@20": 0.94489, +metric_reranker.json: "Recall@20": 0.96102, +metric_es+vs.json: "Recall@20": 0.9543, +metric_es+rr.json: "Recall@20": 0.958, +metric_vs+rr.json: "Recall@20": 0.95195, +metric_es+vs+rr.json: "Recall@20": 0.95699, +metric_es+vs_n.json: "Recall@20": 0.94153, +metric_es+rr_n.json: "Recall@20": 0.95766, +metric_vs+rr_n.json: "Recall@20": 0.95228, +metric_es+vs+rr_n.json: "Recall@20": 0.95699, +``` + +The results of the contextual experiments are consistent with the Anthropic cookbook run. Specifically, the vector-search +Recall@20 of 94.48 (we quote metrics as percentages hereafter) is close to the Anthropic cookbook result of 94.08. Keyword +search and vector search achieve Recall@20 of 89.26 and 94.48, respectively, and the reranker further improves the Recall@20 +to 96.10. The same trend is observed in the NDCG@20 metric.
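+The augmentation itself is plain string concatenation: `create_data.py` (included later in this patch) appends the
+Anthropic-generated context to each original chunk. A minimal sketch, with made-up strings standing in for real
+chunk and context text:
+
+```python
+# Mirrors the augmentation line in create_data.py; the strings are examples only.
+chunk_content = "def evaluate(qrels, results): ..."  # original passage text
+contextualized_content = (
+    "This chunk defines the evaluation routine used to score retrieval runs."
+)  # context generated by the Anthropic API
+page_content = chunk_content + "\n\n" + contextualized_content
+print(page_content)
+```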
Compared to the baseline experiment, the contextual augmentation improves keyword search more than +vector search. Specifically, it boosts the keyword Recall@20 from 70.48 to 89.26 (an 18.78-point increase), +while the vector Recall@20 only improves from 90.06 to 94.48 (a 4.42-point increase). The reranker Recall@20 likewise +improves from 94.15 (baseline) to 96.10 (contextual). The same trend is observed in the NDCG@20 metric. + +## Contextual Experiments with Open Source (Free) Models + +In the above experiments (both baseline and contextual), we used the paid API models from Voyage and Cohere. The experiments cost **a +few cents** for the Voyage API and around **$1** for the Cohere API. In practice, a production retriever system can +contain many more documents than the 737 documents used in this experiment. Therefore, it is crucial to have +different model choices to reduce the cost. In [Denser Retriever](https://github.com/denser-org/denser-retriever), users +can optionally opt out of vector search or reranker models if they are costly. In addition, users can utilize different +models, including free open-source models from HuggingFace, to build a retriever that meets their use cases. + +### Using bge-reranker-base Rerank Model + +If we replace Cohere `rerank-english-v3.0` with the `BAAI/bge-reranker-base` model as follows + +```python +reranker = HFReranker(model_name="BAAI/bge-reranker-base", top_k=100) +``` + +we get the following results: + +``` +== NDCG@20 +metric_keyword.json: "NDCG@20": 0.7041, +metric_vector.json: "NDCG@20": 0.75732, +metric_reranker.json: "NDCG@20": 0.74044, +metric_es+vs.json: "NDCG@20": 0.77171, +metric_es+rr.json: "NDCG@20": 0.76259, +metric_vs+rr.json: "NDCG@20": 0.76896, +metric_es+vs+rr.json: "NDCG@20": 0.7829, +metric_es+vs_n.json: "NDCG@20": 0.77052, +metric_es+rr_n.json: "NDCG@20": 0.76974, +metric_vs+rr_n.json: "NDCG@20": 0.76036, +metric_es+vs+rr_n.json: "NDCG@20": 0.77677, + +== Recall@20 +metric_keyword.json: "Recall@20": 0.89267, +metric_vector.json: "Recall@20": 0.94489, +metric_reranker.json: "Recall@20": 0.91969, +metric_es+vs.json: "Recall@20": 0.95027, +metric_es+rr.json: "Recall@20": 0.92113, +metric_vs+rr.json: "Recall@20": 0.93212, +metric_es+vs+rr.json: "Recall@20": 0.94724, +metric_es+vs_n.json: "Recall@20": 0.93817, +metric_es+rr_n.json: "Recall@20": 0.91599, +metric_vs+rr_n.json: "Recall@20": 0.93212, +metric_es+vs+rr_n.json: "Recall@20": 0.93817, +``` + +In terms of the Recall@20 metric, the open-source model `BAAI/bge-reranker-base` is worse than the paid +model `rerank-english-v3.0`: the open-source model reaches 91.97, while the paid model reaches 96.10. However, the +Denser Retriever method `es+vs+rr_n` boosts the Recall@20 to 93.81, significantly reducing the gap. The NDCG@20 +metric shows a similar trend: the open-source model scores 74.04, the paid model 83.93, and `es+vs+rr_n` reaches 77.67.
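+For reference, swapping rerankers only changes the model behind `HFReranker`, which wraps
+`sentence_transformers.CrossEncoder` (see the `denser_retriever/reranker.py` diff above). A minimal standalone
+sketch of that scoring path, with illustrative query and passage strings:
+
+```python
+from sentence_transformers import CrossEncoder
+
+query = "how are qrels loaded?"  # example query
+passages = [
+    "load_qrels reads relevance labels from a jsonl file ...",  # example passages
+    "unrelated text about logging",
+]
+
+model = CrossEncoder("BAAI/bge-reranker-base")
+scores = model.predict([(query, p) for p in passages])  # one relevance score per pair
+reranked = sorted(zip(passages, scores), key=lambda x: x[1], reverse=True)
+```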
+ +### Using jina-reranker-v2-base-multilingual Rerank Model + +If we use the `jinaai/jina-reranker-v2-base-multilingual` model as follows + +```python +reranker = HFReranker(model_name="jinaai/jina-reranker-v2-base-multilingual", top_k=100, + automodel_args={"torch_dtype": "float32"}, trust_remote_code=True) +``` + +we get the following results: + +``` +== NDCG@20 +metric_keyword.json: "NDCG@20": 0.7041, +metric_vector.json: "NDCG@20": 0.75732, +metric_reranker.json: "NDCG@20": 0.79981, +metric_es+vs.json: "NDCG@20": 0.77244, +metric_es+rr.json: "NDCG@20": 0.80677, +metric_vs+rr.json: "NDCG@20": 0.80539, +metric_es+vs+rr.json: "NDCG@20": 0.81169, +metric_es+vs_n.json: "NDCG@20": 0.77717, +metric_es+rr_n.json: "NDCG@20": 0.79943, +metric_vs+rr_n.json: "NDCG@20": 0.80551, +metric_es+vs+rr_n.json: "NDCG@20": 0.80659, + +== Recall@20 +metric_keyword.json: "Recall@20": 0.89267, +metric_vector.json: "Recall@20": 0.94489, +metric_reranker.json: "Recall@20": 0.96304, +metric_es+vs.json: "Recall@20": 0.94825, +metric_es+rr.json: "Recall@20": 0.96035, +metric_vs+rr.json: "Recall@20": 0.96169, +metric_es+vs+rr.json: "Recall@20": 0.95128, +metric_es+vs_n.json: "Recall@20": 0.93548, +metric_es+rr_n.json: "Recall@20": 0.95766, +metric_vs+rr_n.json: "Recall@20": 0.95195, +metric_es+vs+rr_n.json: "Recall@20": 0.94926, +``` + +In terms of the Recall@20 metric, the `jina-reranker-v2-base-multilingual` model outperforms `rerank-english-v3.0` (96.30 vs 96.10). +Its NDCG@20 is 79.98, while that of the `rerank-english-v3.0` model is 83.93; the `es+vs+rr` combination reaches 81.16, +which helps close the accuracy gap. \ No newline at end of file diff --git a/experiments/data/contextual-embeddings/create_data.py b/experiments/data/contextual-embeddings/create_data.py new file mode 100644 index 0000000..02c0fc7 --- /dev/null +++ b/experiments/data/contextual-embeddings/create_data.py @@ -0,0 +1,61 @@ +import json +from langchain_core.documents import Document +import os +import pickle + + +def create_contextual_data(original_data_dir, output_data_dir, add_anthropic_context): + if not os.path.exists(output_data_dir): + os.makedirs(output_data_dir) + out_passages = open(os.path.join(output_data_dir, 'passages.jsonl'), 'w') + out_queries = open(os.path.join(output_data_dir, 'queries.jsonl'), 'w') + out_qrels = open(os.path.join(output_data_dir, 'qrels.jsonl'), 'w') + + if add_anthropic_context: + with open(os.path.join(output_data_dir, "contextual_vector_db.pkl"), "rb") as file: + data = pickle.load(file) + meta = data["metadata"] + + with open(os.path.join(original_data_dir, 'codebase_chunks.json'), 'r') as input_file: + doc_id = 0 + docs = json.loads(input_file.read()) + for doc in docs: + doc_uuid = doc["original_uuid"] + for chunk in doc['chunks']: + if add_anthropic_context: + page_content = chunk.pop('content') + "\n\n" + meta[doc_id]['contextualized_content'] + doc_id += 1 + else: + page_content = chunk.pop('content') + metadata = chunk + metadata['pid'] = doc_uuid + "_" + str(metadata['original_index']) + new_doc = Document(page_content=page_content, metadata=metadata) + out_passages.write(json.dumps(new_doc.dict(), ensure_ascii=False) + "\n") + + if add_anthropic_context: + assert doc_id == len(meta) + + with open(os.path.join(original_data_dir, 'evaluation_set.jsonl'), 'r') as input_file: + query_id = 0 + for line in input_file: + data = json.loads(line) + query_dict = {"id": str(query_id), "text": data['query']} + out_queries.write(json.dumps(query_dict) + '\n') + labels = [] + for gold_doc,
passage_index in data['golden_chunk_uuids']: + labels.append(gold_doc + "_" + str(passage_index)) + query_to_labels = {str(query_id): {label: 1 for label in labels}} + out_qrels.write(json.dumps(query_to_labels) + '\n') + query_id += 1 + + +if __name__ == "__main__": + original_data_dir = "experiments/data/contextual-embeddings/original_data" + + output_data_dir = "experiments/data/contextual-embeddings/data_base" + add_anthropic_context = False + create_contextual_data(original_data_dir, output_data_dir, add_anthropic_context) + + output_data_dir = "experiments/data/contextual-embeddings/data_context" + add_anthropic_context = True + create_contextual_data(original_data_dir, output_data_dir, add_anthropic_context) diff --git a/experiments/data/contextual-embeddings/data_base/passages.jsonl b/experiments/data/contextual-embeddings/data_base/passages.jsonl new file mode 100644 index 0000000..ee0a61d --- /dev/null +++ b/experiments/data/contextual-embeddings/data_base/passages.jsonl @@ -0,0 +1,737 @@ +{"page_content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\n", "metadata": {"chunk_id": "doc_1_chunk_0", "original_index": 0, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_0"}, "type": "Document"} +{"page_content": "impl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n", "metadata": {"chunk_id": "doc_1_chunk_1", "original_index": 1, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_1"}, "type": "Document"} +{"page_content": " /// Retrieve the primary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn primary(&mut self) -> &mut A {\n &mut self.primary\n }\n\n /// Retrieve the secondary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn secondary(&mut self) -> &mut B {\n &mut self.secondary\n }\n}\n\nimpl Executor for DiffExecutor\nwhere\n A: Executor + HasObservers,\n B: Executor + HasObservers,\n EM: UsesState,\n DOT: DifferentialObserversTuple,\n Z: UsesState,\n{\n fn run_target(\n &mut self,\n fuzzer: &mut Z,\n state: &mut Self::State,\n mgr: &mut EM,\n input: &Self::Input,\n ) -> Result {\n self.observers(); // update in advance\n let observers = self.observers.get_mut();\n observers\n .differential\n", "metadata": {"chunk_id": "doc_1_chunk_2", "original_index": 2, "pid": 
"5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_2"}, "type": "Document"} +{"page_content": " .pre_observe_first_all(observers.primary.as_mut())?;\n observers.primary.as_mut().pre_exec_all(state, input)?;\n let ret1 = self.primary.run_target(fuzzer, state, mgr, input)?;\n observers\n .primary\n .as_mut()\n .post_exec_all(state, input, &ret1)?;\n observers\n .differential\n .post_observe_first_all(observers.primary.as_mut())?;\n observers\n .differential\n", "metadata": {"chunk_id": "doc_1_chunk_3", "original_index": 3, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_3"}, "type": "Document"} +{"page_content": " .pre_observe_second_all(observers.secondary.as_mut())?;\n observers.secondary.as_mut().pre_exec_all(state, input)?;\n let ret2 = self.secondary.run_target(fuzzer, state, mgr, input)?;\n observers\n .secondary\n .as_mut()\n .post_exec_all(state, input, &ret2)?;\n observers\n .differential\n .post_observe_second_all(observers.secondary.as_mut())?;\n if ret1 == ret2 {\n Ok(ret1)\n } else {\n // We found a diff in the exit codes!\n Ok(ExitKind::Diff {\n primary: ret1.into(),\n secondary: ret2.into(),\n })\n }\n }\n}\n\n", "metadata": {"chunk_id": "doc_1_chunk_4", "original_index": 4, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_4"}, "type": "Document"} +{"page_content": "/// Proxy the observers of the inner executors\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(\n bound = \"A: serde::Serialize + serde::de::DeserializeOwned, B: serde::Serialize + serde::de::DeserializeOwned, DOT: serde::Serialize + serde::de::DeserializeOwned\"\n)]\npub struct ProxyObserversTuple {\n primary: OwnedMutPtr,\n secondary: OwnedMutPtr,\n differential: DOT,\n}\n\nimpl ObserversTuple for ProxyObserversTuple\nwhere\n A: ObserversTuple,\n B: ObserversTuple,\n DOT: DifferentialObserversTuple,\n S: UsesInput,\n{\n fn pre_exec_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_all(state, input)\n }\n\n", "metadata": {"chunk_id": "doc_1_chunk_5", "original_index": 5, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_5"}, "type": "Document"} +{"page_content": " fn post_exec_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential.post_exec_all(state, input, exit_kind)\n }\n\n fn pre_exec_child_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_child_all(state, input)\n }\n\n fn post_exec_child_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential\n .post_exec_child_all(state, input, exit_kind)\n }\n\n", "metadata": {"chunk_id": "doc_1_chunk_6", "original_index": 6, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_6"}, "type": "Document"} +{"page_content": " /// Returns true if a `stdout` observer was added to the list\n #[inline]\n fn observes_stdout(&self) -> bool {\n self.primary.as_ref().observes_stdout() || self.secondary.as_ref().observes_stdout()\n }\n /// Returns true if a `stderr` observer was added to the list\n #[inline]\n fn observes_stderr(&self) -> bool {\n self.primary.as_ref().observes_stderr() || self.secondary.as_ref().observes_stderr()\n }\n\n", "metadata": {"chunk_id": "doc_1_chunk_7", "original_index": 7, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_7"}, "type": "Document"} +{"page_content": " /// Runs 
`observe_stdout` for all stdout observers in the list\n fn observe_stdout(&mut self, stdout: &[u8]) {\n self.primary.as_mut().observe_stderr(stdout);\n self.secondary.as_mut().observe_stderr(stdout);\n }\n\n /// Runs `observe_stderr` for all stderr observers in the list\n fn observe_stderr(&mut self, stderr: &[u8]) {\n self.primary.as_mut().observe_stderr(stderr);\n self.secondary.as_mut().observe_stderr(stderr);\n }\n}\n\n", "metadata": {"chunk_id": "doc_1_chunk_8", "original_index": 8, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_8"}, "type": "Document"} +{"page_content": "impl MatchName for ProxyObserversTuple\nwhere\n A: MatchName,\n B: MatchName,\n DOT: MatchName,\n{\n fn match_name(&self, name: &str) -> Option<&T> {\n if let Some(t) = self.primary.as_ref().match_name::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_ref().match_name::(name) {\n Some(t)\n } else {\n self.differential.match_name::(name)\n }\n }\n fn match_name_mut(&mut self, name: &str) -> Option<&mut T> {\n if let Some(t) = self.primary.as_mut().match_name_mut::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_mut().match_name_mut::(name) {\n Some(t)\n } else {\n self.differential.match_name_mut::(name)\n }\n }\n}\n\n", "metadata": {"chunk_id": "doc_1_chunk_9", "original_index": 9, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_9"}, "type": "Document"} +{"page_content": "impl ProxyObserversTuple {\n fn set(&mut self, primary: &A, secondary: &B) {\n self.primary = OwnedMutPtr::Ptr(ptr::from_ref(primary) as *mut A);\n self.secondary = OwnedMutPtr::Ptr(ptr::from_ref(secondary) as *mut B);\n }\n}\n\nimpl UsesObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n type Observers = ProxyObserversTuple;\n}\n\nimpl UsesState for DiffExecutor\nwhere\n A: UsesState,\n B: UsesState,\n{\n type State = A::State;\n}\n\n", "metadata": {"chunk_id": "doc_1_chunk_10", "original_index": 10, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_10"}, "type": "Document"} +{"page_content": "impl HasObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n #[inline]\n fn observers(&self) -> &ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_ref().unwrap()\n }\n }\n\n", "metadata": {"chunk_id": "doc_1_chunk_11", "original_index": 11, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_11"}, "type": "Document"} +{"page_content": " #[inline]\n fn observers_mut(&mut self) -> &mut ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_mut().unwrap()\n }\n }\n}\n", "metadata": {"chunk_id": "doc_1_chunk_12", "original_index": 12, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_12"}, "type": "Document"} +{"page_content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n 
executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n", "metadata": {"chunk_id": "doc_2_chunk_0", "original_index": 0, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_0"}, "type": "Document"} +{"page_content": "/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n", "metadata": {"chunk_id": "doc_2_chunk_1", "original_index": 1, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_1"}, "type": "Document"} +{"page_content": " #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n", "metadata": {"chunk_id": "doc_2_chunk_2", "original_index": 2, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_2"}, "type": "Document"} +{"page_content": " // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n", "metadata": {"chunk_id": "doc_2_chunk_3", "original_index": 3, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_3"}, "type": "Document"} +{"page_content": " // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the 
notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n", "metadata": {"chunk_id": "doc_2_chunk_4", "original_index": 4, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_4"}, "type": "Document"} +{"page_content": " // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n", "metadata": {"chunk_id": "doc_2_chunk_5", "original_index": 5, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_5"}, "type": "Document"} +{"page_content": " // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "metadata": {"chunk_id": "doc_2_chunk_6", "original_index": 6, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_6"}, "type": "Document"} +{"page_content": "use core::{ffi::c_void, fmt::Debug};\nuse std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};\n\nuse libafl::{\n events::EventFirer,\n executors::ExitKind,\n feedbacks::Feedback,\n inputs::UsesInput,\n observers::{Observer, ObserversTuple},\n state::State,\n Error,\n};\nuse libafl_bolts::Named;\nuse libc::SIGABRT;\nuse serde::{Deserialize, Serialize};\n\nextern \"C\" {\n fn libafl_check_malloc_size(ptr: *const c_void) -> usize;\n}\n\nstatic RUNNING: AtomicBool = AtomicBool::new(false);\nstatic OOMED: AtomicBool = AtomicBool::new(false);\nstatic RSS_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n// 2GB, which is the default\nstatic MALLOC_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n\nstatic MALLOC_SIZE: AtomicUsize = AtomicUsize::new(0);\n\n", "metadata": {"chunk_id": "doc_3_chunk_0", "original_index": 0, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_0"}, "type": "Document"} +{"page_content": "/// malloc hook which will be invoked if address sanitizer is present. 
Used to detect if the target makes a malloc call\n/// that will exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid freshly allocated pointers backed by allocations of `size`.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_malloc_hook(ptr: *const c_void, size: usize) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = match unsafe { libafl_check_malloc_size(ptr) } {\n 0 => size, // either the malloc size function didn't work or it's really zero-sized\n real => real,\n };\n\n", "metadata": {"chunk_id": "doc_3_chunk_1", "original_index": 1, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_1"}, "type": "Document"} +{"page_content": " let total = MALLOC_SIZE.fetch_add(size, Ordering::Relaxed) + size;\n if (size > MALLOC_MAX.load(Ordering::Relaxed) || total > RSS_MAX.load(Ordering::Relaxed))\n && !OOMED.swap(true, Ordering::Relaxed)\n {\n unsafe {\n // we need to kill the process in a way that immediately triggers the crash handler\n libc::raise(SIGABRT);\n }\n }\n }\n}\n\n", "metadata": {"chunk_id": "doc_3_chunk_2", "original_index": 2, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_2"}, "type": "Document"} +{"page_content": "/// free hook which will be invoked if ASAN is present. Used to detect if the target makes a malloc call that will\n/// exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid allocated pointers, about to be freed.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_free_hook(ptr: *const c_void) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = unsafe { libafl_check_malloc_size(ptr) };\n MALLOC_SIZE\n .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |existing| {\n Some(existing.saturating_sub(size))\n })\n .expect(\"must complete successfully\");\n }\n}\n\n", "metadata": {"chunk_id": "doc_3_chunk_3", "original_index": 3, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_3"}, "type": "Document"} +{"page_content": "const OOM_OBS_NAME: &str = \"libfuzzer-like-oom\";\n\n/// Observer which detects if the target would run out of memory or otherwise violate the permissible usage of malloc\n#[derive(Debug, Serialize, Deserialize)]\npub struct OomObserver {\n oomed: bool,\n}\n\nimpl OomObserver {\n /// Create a [`OomObserver`] with the provided `rss_max` (total heap size) and `malloc_max` (largest permissible malloc\n /// allocation size)\n pub fn new(rss_max: usize, malloc_max: usize) -> Self {\n RSS_MAX.store(rss_max, Ordering::Relaxed);\n MALLOC_MAX.store(malloc_max, Ordering::Relaxed);\n Self { oomed: false }\n }\n}\n\nimpl Named for OomObserver {\n // strictly one name to prevent two from being registered\n fn name(&self) -> &str {\n OOM_OBS_NAME\n }\n}\n\n", "metadata": {"chunk_id": "doc_3_chunk_4", "original_index": 4, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_4"}, "type": "Document"} +{"page_content": "impl Observer for OomObserver\nwhere\n S: UsesInput,\n{\n fn pre_exec(&mut self, _state: &mut S, _input: &S::Input) -> Result<(), Error> {\n OOMED.store(false, Ordering::Relaxed);\n // must reset for platforms which do not offer malloc tracking\n MALLOC_SIZE.store(0, Ordering::Relaxed);\n RUNNING.store(true, Ordering::Relaxed);\n Ok(())\n }\n\n fn post_exec(\n &mut self,\n _state: &mut S,\n _input: &S::Input,\n _exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n RUNNING.store(false, Ordering::Relaxed);\n self.oomed = OOMED.load(Ordering::Relaxed);\n Ok(())\n }\n\n fn pre_exec_child(&mut self, 
state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.pre_exec(state, input)\n }\n\n fn post_exec_child(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.post_exec(state, input, exit_kind)\n }\n}\n\n", "metadata": {"chunk_id": "doc_3_chunk_5", "original_index": 5, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_5"}, "type": "Document"} +{"page_content": "/// Feedback for the similarly named [`OomObserver`] to detect if the target crashed due to an observed OOM\n#[derive(Debug, Serialize, Deserialize, Copy, Clone, Default)]\npub struct OomFeedback;\n\nimpl OomFeedback {\n /// Whether the target OOM'd in the last execution\n pub fn oomed() -> bool {\n OOMED.load(Ordering::Relaxed)\n }\n}\n\nimpl Named for OomFeedback {\n fn name(&self) -> &str {\n \"oom\"\n }\n}\n\nimpl Feedback for OomFeedback\nwhere\n S: State,\n{\n fn is_interesting(\n &mut self,\n _state: &mut S,\n _manager: &mut EM,\n _input: &S::Input,\n _observers: &OT,\n _exit_kind: &ExitKind,\n ) -> Result\n where\n EM: EventFirer,\n OT: ObserversTuple,\n {\n Ok(Self::oomed())\n }\n}\n", "metadata": {"chunk_id": "doc_3_chunk_6", "original_index": 6, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_6"}, "type": "Document"} +{"page_content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}", "metadata": {"chunk_id": "doc_4_chunk_0", "original_index": 0, "pid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f_0"}, "type": "Document"} +{"page_content": "#include \n#include \n#include \n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}", "metadata": {"chunk_id": "doc_5_chunk_0", "original_index": 0, "pid": "0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95_0"}, "type": "Document"} +{"page_content": "use std::{\n collections::{BTreeSet, HashMap},\n marker::PhantomData,\n};\n\nuse libafl::{\n corpus::{Corpus, CorpusId, Testcase},\n feedbacks::MapNoveltiesMetadata,\n inputs::UsesInput,\n schedulers::{RemovableScheduler, Scheduler},\n state::{HasCorpus, HasMetadata, State, UsesState},\n Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler {\n mapping: HashMap,\n all: BTreeSet,\n phantom: PhantomData,\n}\n\nimpl UsesState for MergeScheduler\nwhere\n S: State,\n{\n type State = S;\n}\n\n", "metadata": {"chunk_id": "doc_6_chunk_0", "original_index": 0, "pid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_0"}, "type": "Document"} +{"page_content": "impl RemovableScheduler for MergeScheduler\nwhere\n S: State + HasCorpus,\n{\n fn 
on_remove(\n &mut self,\n _state: &mut Self::State,\n idx: CorpusId,\n _testcase: &Option::Input>>,\n ) -> Result<(), Error> {\n self.all.remove(&idx);\n Ok(())\n }\n}\n\nimpl Scheduler for MergeScheduler\nwhere\n S: State + HasCorpus,\n{\n fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n self.all.insert(idx);\n let testcase = state.corpus().get(idx)?.borrow();\n let meta = testcase.metadata::()?;\n for cov_idx in &meta.list {\n self.mapping.insert(*cov_idx, idx);\n }\n Ok(())\n }\n\n", "metadata": {"chunk_id": "doc_6_chunk_1", "original_index": 1, "pid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_1"}, "type": "Document"} +{"page_content": " fn next(&mut self, _state: &mut Self::State) -> Result {\n unimplemented!(\"Not suitable for actual scheduling.\");\n }\n}\n\nimpl MergeScheduler {\n pub fn new() -> Self {\n Self {\n mapping: HashMap::default(),\n all: BTreeSet::default(),\n phantom: PhantomData,\n }\n }\n\n pub fn removable(&self) -> BTreeSet {\n self.all\n .difference(&self.mapping.values().copied().collect())\n .copied()\n .collect()\n }\n\n pub fn current(&self) -> &BTreeSet {\n &self.all\n }\n}\n", "metadata": {"chunk_id": "doc_6_chunk_2", "original_index": 2, "pid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_2"}, "type": "Document"} +{"page_content": "use std::sync::OnceLock;\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::aarch64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n X0 = 0,\n X1 = 1,\n X2 = 2,\n X3 = 3,\n X4 = 4,\n X5 = 5,\n X6 = 6,\n X7 = 7,\n X8 = 8,\n X9 = 9,\n X10 = 10,\n X11 = 11,\n X12 = 12,\n X13 = 13,\n X14 = 14,\n X15 = 15,\n X16 = 16,\n X17 = 17,\n X18 = 18,\n X19 = 19,\n X20 = 20,\n X21 = 21,\n X22 = 22,\n X23 = 23,\n X24 = 24,\n X25 = 25,\n X26 = 26,\n X27 = 27,\n X28 = 28,\n X29 = 29,\n X30 = 30,\n Sp = 31,\n Pc = 32,\n Pstate = 33,\n}\n\n", "metadata": {"chunk_id": "doc_7_chunk_0", "original_index": 0, "pid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_0"}, "type": "Document"} +{"page_content": "static BACKDOOR_ARCH_REGS: OnceLock> = OnceLock::new();\n\npub fn get_backdoor_arch_regs() -> &'static EnumMap {\n BACKDOOR_ARCH_REGS.get_or_init(|| {\n enum_map! 
{\n BackdoorArgs::Ret => Regs::X0,\n BackdoorArgs::Cmd => Regs::X0,\n BackdoorArgs::Arg1 => Regs::X1,\n BackdoorArgs::Arg2 => Regs::X2,\n BackdoorArgs::Arg3 => Regs::X3,\n BackdoorArgs::Arg4 => Regs::X4,\n BackdoorArgs::Arg5 => Regs::X5,\n BackdoorArgs::Arg6 => Regs::X6,\n }\n })\n}\n\n", "metadata": {"chunk_id": "doc_7_chunk_1", "original_index": 1, "pid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_1"}, "type": "Document"} +{"page_content": "/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n pub const Fp: Regs = Regs::X29;\n pub const Lr: Regs = Regs::X30;\n}\n\n/// Return an ARM64 ArchCapstoneBuilder\npub fn capstone() -> capstone::arch::arm64::ArchCapstoneBuilder {\n capstone::Capstone::new()\n .arm64()\n .mode(capstone::arch::arm64::ArchMode::Arm)\n}\n\npub type GuestReg = u64;\n\nimpl crate::ArchExtras for crate::CPU {\n fn read_return_address(&self) -> Result\n where\n T: From,\n {\n self.read_reg(Regs::Lr)\n }\n\n fn write_return_address(&self, val: T) -> Result<(), String>\n where\n T: Into,\n {\n self.write_reg(Regs::Lr, val)\n }\n\n", "metadata": {"chunk_id": "doc_7_chunk_2", "original_index": 2, "pid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_2"}, "type": "Document"} +{"page_content": " fn read_function_argument(&self, conv: CallingConvention, idx: u8) -> Result\n where\n T: From,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let reg_id = match idx {\n 0 => Regs::X0,\n 1 => Regs::X1,\n 2 => Regs::X2,\n 3 => Regs::X3,\n 4 => Regs::X4,\n 5 => Regs::X5,\n r => return Err(format!(\"Unsupported argument: {r:}\")),\n };\n\n self.read_reg(reg_id)\n }\n\n", "metadata": {"chunk_id": "doc_7_chunk_3", "original_index": 3, "pid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_3"}, "type": "Document"} +{"page_content": " fn write_function_argument(\n &self,\n conv: CallingConvention,\n idx: i32,\n val: T,\n ) -> Result<(), String>\n where\n T: Into,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let val: GuestReg = val.into();\n match idx {\n 0 => self.write_reg(Regs::X0, val),\n 1 => self.write_reg(Regs::X1, val),\n _ => Err(format!(\"Unsupported argument: {idx:}\")),\n }\n }\n}\n", "metadata": {"chunk_id": "doc_7_chunk_4", "original_index": 4, "pid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_4"}, "type": "Document"} +{"page_content": "use std::{mem::size_of, sync::OnceLock};\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::x86_64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n Rax = 0,\n Rbx = 1,\n Rcx = 2,\n Rdx = 3,\n Rsi = 4,\n Rdi = 5,\n Rbp = 6,\n Rsp = 7,\n R8 = 8,\n R9 = 9,\n R10 = 10,\n R11 = 11,\n R12 = 12,\n R13 = 13,\n R14 = 14,\n R15 = 15,\n Rip = 16,\n Rflags = 17,\n}\n\nstatic BACKDOOR_ARCH_REGS: OnceLock> = OnceLock::new();\n\n", "metadata": {"chunk_id": "doc_8_chunk_0", "original_index": 0, "pid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_0"}, "type": "Document"} +{"page_content": "pub fn get_backdoor_arch_regs() -> &'static EnumMap {\n BACKDOOR_ARCH_REGS.get_or_init(|| {\n enum_map! 
{\n BackdoorArgs::Ret => Regs::Rax,\n BackdoorArgs::Cmd => Regs::Rax,\n BackdoorArgs::Arg1 => Regs::Rdi,\n BackdoorArgs::Arg2 => Regs::Rsi,\n BackdoorArgs::Arg3 => Regs::Rdx,\n BackdoorArgs::Arg4 => Regs::R10,\n BackdoorArgs::Arg5 => Regs::R8,\n BackdoorArgs::Arg6 => Regs::R9,\n }\n })\n}\n\n/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n pub const Sp: Regs = Regs::Rsp;\n pub const Pc: Regs = Regs::Rip;\n}\n\n/// Return an X86 `ArchCapstoneBuilder`\n#[must_use]\npub fn capstone() -> capstone::arch::x86::ArchCapstoneBuilder {\n capstone::Capstone::new()\n .x86()\n .mode(capstone::arch::x86::ArchMode::Mode64)\n}\n\npub type GuestReg = u64;\n\n", "metadata": {"chunk_id": "doc_8_chunk_1", "original_index": 1, "pid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_1"}, "type": "Document"} +{"page_content": "impl crate::ArchExtras for crate::CPU {\n fn read_return_address(&self) -> Result\n where\n T: From,\n {\n let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n let mut ret_addr = [0; size_of::()];\n unsafe { self.read_mem(stack_ptr, &mut ret_addr) };\n Ok(GuestReg::from_le_bytes(ret_addr).into())\n }\n\n fn write_return_address(&self, val: T) -> Result<(), String>\n where\n T: Into,\n {\n let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n let val: GuestReg = val.into();\n let ret_addr = val.to_le_bytes();\n unsafe { self.write_mem(stack_ptr, &ret_addr) };\n Ok(())\n }\n\n", "metadata": {"chunk_id": "doc_8_chunk_2", "original_index": 2, "pid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_2"}, "type": "Document"} +{"page_content": " fn read_function_argument(&self, conv: CallingConvention, idx: u8) -> Result\n where\n T: From,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let reg_id = match idx {\n 0 => Regs::Rdi,\n 1 => Regs::Rsi,\n 2 => Regs::Rdx,\n 3 => Regs::Rcx,\n 4 => Regs::R8,\n 5 => Regs::R9,\n r => return Err(format!(\"Unsupported argument: {r:}\")),\n };\n\n self.read_reg(reg_id)\n }\n\n", "metadata": {"chunk_id": "doc_8_chunk_3", "original_index": 3, "pid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_3"}, "type": "Document"} +{"page_content": " fn write_function_argument(\n &self,\n conv: CallingConvention,\n idx: i32,\n val: T,\n ) -> Result<(), String>\n where\n T: Into,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let val: GuestReg = val.into();\n match idx {\n 0 => self.write_reg(Regs::Rdi, val),\n 1 => self.write_reg(Regs::Rsi, val),\n _ => Err(format!(\"Unsupported argument: {idx:}\")),\n }\n }\n}\n", "metadata": {"chunk_id": "doc_8_chunk_4", "original_index": 4, "pid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_4"}, "type": "Document"} +{"page_content": "//! 
Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods\n//!\n\n//use ahash::AHasher;\n//use core::hash::Hasher;\n\nuse alloc::{rc::Rc, string::String, vec::Vec};\nuse core::cell::RefCell;\nuse std::hash::{Hash, Hasher};\n\nuse grammartec::{\n newtypes::NodeID,\n rule::RuleIDOrCustom,\n tree::{Tree, TreeLike},\n};\nuse libafl_bolts::HasLen;\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n generators::nautilus::NautilusContext,\n inputs::{BytesInput, Input, InputConverter},\n Error,\n};\n\n/// An [`Input`] implementation for `Nautilus` grammar.\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct NautilusInput {\n /// The input representation as Tree\n pub tree: Tree,\n}\n\n", "metadata": {"chunk_id": "doc_9_chunk_0", "original_index": 0, "pid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_0"}, "type": "Document"} +{"page_content": "impl Input for NautilusInput {\n /// Generate a name for this input\n #[must_use]\n fn generate_name(&self, idx: usize) -> String {\n /*let mut hasher = AHasher::new_with_keys(0, 0);\n for term in &self.terms {\n hasher.write(term.symbol.as_bytes());\n }\n format!(\"{:016x}\", hasher.finish())*/\n format!(\"id:{idx}\")\n }\n}\n\n/// Rc Ref-cell from Input\nimpl From for Rc> {\n fn from(input: NautilusInput) -> Self {\n Rc::new(RefCell::new(input))\n }\n}\n\nimpl HasLen for NautilusInput {\n #[inline]\n fn len(&self) -> usize {\n self.tree.size()\n }\n}\n\nimpl NautilusInput {\n /// Creates a new codes input using the given terminals\n #[must_use]\n pub fn new(tree: Tree) -> Self {\n Self { tree }\n }\n\n", "metadata": {"chunk_id": "doc_9_chunk_1", "original_index": 1, "pid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_1"}, "type": "Document"} +{"page_content": " /// Create an empty [`Input`]\n #[must_use]\n pub fn empty() -> Self {\n Self {\n tree: Tree {\n rules: vec![],\n sizes: vec![],\n paren: vec![],\n },\n }\n }\n\n /// Generate a `Nautilus` input from the given bytes\n pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec) {\n bytes.clear();\n self.tree.unparse(NodeID::from(0), &context.ctx, bytes);\n }\n\n", "metadata": {"chunk_id": "doc_9_chunk_2", "original_index": 2, "pid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_2"}, "type": "Document"} +{"page_content": " /// Get the tree representation of this input\n #[must_use]\n pub fn tree(&self) -> &Tree {\n &self.tree\n }\n\n /// Get the tree representation of this input, as a mutable reference\n #[must_use]\n pub fn tree_mut(&mut self) -> &mut Tree {\n &mut self.tree\n }\n}\n\nimpl Hash for NautilusInput {\n fn hash(&self, state: &mut H) {\n self.tree().paren.hash(state);\n for r in &self.tree().rules {\n match r {\n RuleIDOrCustom::Custom(a, b) => {\n a.hash(state);\n b.hash(state);\n }\n RuleIDOrCustom::Rule(a) => a.hash(state),\n }\n }\n self.tree().sizes.hash(state);\n }\n}\n\n", "metadata": {"chunk_id": "doc_9_chunk_3", "original_index": 3, "pid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_3"}, "type": "Document"} +{"page_content": "/// `InputConverter` to convert from `NautilusInput` to `BytesInput`\n#[derive(Debug)]\npub struct NautilusToBytesInputConverter<'a> {\n ctx: &'a NautilusContext,\n}\n\nimpl<'a> NautilusToBytesInputConverter<'a> {\n #[must_use]\n /// Create a new `NautilusToBytesInputConverter` from a context\n pub fn new(ctx: &'a NautilusContext) -> Self {\n Self { ctx }\n }\n}\n\nimpl<'a> InputConverter for NautilusToBytesInputConverter<'a> 
{\n type From = NautilusInput;\n type To = BytesInput;\n\n fn convert(&mut self, input: Self::From) -> Result<Self::To, Error> {\n let mut bytes = vec![];\n input.unparse(self.ctx, &mut bytes);\n Ok(BytesInput::new(bytes))\n }\n}\n", "metadata": {"chunk_id": "doc_9_chunk_4", "original_index": 4, "pid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_4"}, "type": "Document"} +{"page_content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n", "metadata": {"chunk_id": "doc_10_chunk_0", "original_index": 0, "pid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2_0"}, "type": "Document"} +{"page_content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n", "metadata": {"chunk_id": "doc_11_chunk_0", "original_index": 0, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_0"}, "type": "Document"} +{"page_content": " _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n", "metadata": {"chunk_id": "doc_11_chunk_1", "original_index": 1, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_1"}, "type": "Document"} +{"page_content": " def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n", "metadata": {"chunk_id": "doc_11_chunk_2", "original_index": 2, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_2"}, "type": "Document"} +{"page_content": " # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n 
for i in self._modules:\n if not issubclass(input_type, i):\n", "metadata": {"chunk_id": "doc_11_chunk_3", "original_index": 3, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_3"}, "type": "Document"} +{"page_content": " continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n", "metadata": {"chunk_id": "doc_11_chunk_4", "original_index": 4, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_4"}, "type": "Document"} +{"page_content": " # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n", "metadata": {"chunk_id": "doc_11_chunk_5", "original_index": 5, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_5"}, "type": "Document"} +{"page_content": " # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n", "metadata": {"chunk_id": "doc_11_chunk_6", "original_index": 6, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_6"}, "type": "Document"} +{"page_content": " self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n", "metadata": {"chunk_id": "doc_11_chunk_7", "original_index": 7, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_7"}, "type": "Document"} +{"page_content": " target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n", "metadata": 
{"chunk_id": "doc_11_chunk_8", "original_index": 8, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_8"}, "type": "Document"} +{"page_content": " def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", "metadata": {"chunk_id": "doc_11_chunk_9", "original_index": 9, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_9"}, "type": "Document"} +{"page_content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n", "metadata": {"chunk_id": "doc_12_chunk_0", "original_index": 0, "pid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_0"}, "type": "Document"} +{"page_content": " return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n", "metadata": {"chunk_id": "doc_12_chunk_1", "original_index": 1, "pid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_1"}, "type": "Document"} +{"page_content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n", "metadata": {"chunk_id": "doc_13_chunk_0", "original_index": 0, "pid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_0"}, "type": "Document"} +{"page_content": " f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return 
None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "metadata": {"chunk_id": "doc_13_chunk_1", "original_index": 1, "pid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_1"}, "type": "Document"} +{"page_content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n", "metadata": {"chunk_id": "doc_14_chunk_0", "original_index": 0, "pid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad_0"}, "type": "Document"} +{"page_content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Morse_code(Decoder[str]):\n # A priority list for char/word boundaries\n BOUNDARIES = {\" \": 1, \"/\": 2, \"\\n\": 3}\n PURGE = {ord(c): None for c in BOUNDARIES.keys()}\n MAX_PRIORITY = 3\n ALLOWED = {\".\", \"-\", \" \", \"/\", \"\\n\"}\n MORSE_CODE_DICT: Dict[str, str]\n MORSE_CODE_DICT_INV: Dict[str, str]\n\n def decode(self, ctext: T) -> Optional[U]:\n logging.debug(\"Attempting Morse code decoder\")\n\n char_boundary = word_boundary = None\n\n", "metadata": {"chunk_id": "doc_15_chunk_0", "original_index": 0, "pid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_0"}, "type": "Document"} +{"page_content": " char_boundary = word_boundary = None\n char_priority = word_priority = 0\n # Custom loop allows early break\n for i in ctext:\n i_priority = self.BOUNDARIES.get(i)\n if i_priority is None:\n if i in self.ALLOWED:\n continue\n logging.debug(f\"Non-morse char '{i}' found\")\n return None\n\n", "metadata": {"chunk_id": "doc_15_chunk_1", "original_index": 1, "pid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_1"}, "type": "Document"} +{"page_content": " if i_priority <= char_priority or i == char_boundary or i == word_boundary:\n continue\n # Default to having a char boundary over a word boundary\n if (\n i_priority > word_priority\n and word_boundary is None\n and char_boundary is not None\n ):\n word_priority = i_priority\n word_boundary = i\n continue\n char_priority = i_priority\n char_boundary = i\n\n logging.debug(\n f\"Char boundary is unicode {ord(char_boundary)}, and word boundary is unicode {ord(word_boundary) if word_boundary is not None else None}\"\n )\n\n result = \"\"\n\n", "metadata": 
{"chunk_id": "doc_15_chunk_2", "original_index": 2, "pid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_2"}, "type": "Document"} +{"page_content": " for word in ctext.split(word_boundary) if word_boundary else [ctext]:\n logging.debug(f\"Attempting to decode word {word}\")\n for char in word.split(char_boundary):\n char = char.translate(self.PURGE)\n if len(char) == 0:\n continue\n try:\n m = self.MORSE_CODE_DICT_INV[char]\n except KeyError:\n logging.debug(f\"Invalid codeword '{char}' found\")\n return None\n result = result + m\n # after every word add a space\n result = result + \" \"\n if len(result) == 0:\n logging.debug(\"Morse code failed to match\")\n return None\n # Remove trailing space\n result = result[:-1]\n logging.info(f\"Morse code successful, returning {result}\")\n return result.strip().upper()\n\n", "metadata": {"chunk_id": "doc_15_chunk_3", "original_index": 3, "pid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_3"}, "type": "Document"} +{"page_content": " @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.MORSE_CODE_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.MORSE_CODE_DICT_INV = {v: k for k, v in self.MORSE_CODE_DICT.items()}\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The morse code dictionary to use\",\n req=False,\n default=\"cipheydists::translate::morse\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"morse_code\"\n", "metadata": {"chunk_id": "doc_15_chunk_4", "original_index": 4, "pid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_4"}, "type": "Document"} +{"page_content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n", "metadata": {"chunk_id": "doc_16_chunk_0", "original_index": 0, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_0"}, "type": "Document"} +{"page_content": " def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n", "metadata": {"chunk_id": "doc_16_chunk_1", "original_index": 1, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_1"}, "type": "Document"} +{"page_content": " # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = 
ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n", "metadata": {"chunk_id": "doc_16_chunk_2", "original_index": 2, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_2"}, "type": "Document"} +{"page_content": " sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n", "metadata": {"chunk_id": "doc_16_chunk_3", "original_index": 3, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_3"}, "type": "Document"} +{"page_content": " def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n", "metadata": {"chunk_id": "doc_16_chunk_4", "original_index": 4, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_4"}, "type": "Document"} +{"page_content": " for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n", "metadata": {"chunk_id": "doc_16_chunk_5", "original_index": 5, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_5"}, "type": "Document"} +{"page_content": "# by https://github.com/RustyDucky and https://github.com/lukasgabriel\n\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Tap_code(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Tap code decoding\n 
\"\"\"\n try:\n result = \"\"\n combinations = ctext.split(\" \")\n for fragment in combinations:\n result += self.TABLE.get(fragment)\n return result\n except Exception:\n return None\n\n", "metadata": {"chunk_id": "doc_17_chunk_0", "original_index": 0, "pid": "44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3_0"}, "type": "Document"} +{"page_content": " @staticmethod\n def priority() -> float:\n return 0.06\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.TABLE = config.get_resource(self._params()[\"dict\"], Translation)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The table of letters used for the tap code interpretation.\",\n req=False,\n default=\"cipheydists::translate::tap_code\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"tap_code\"\n", "metadata": {"chunk_id": "doc_17_chunk_1", "original_index": 1, "pid": "44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3_1"}, "type": "Document"} +{"page_content": "from functools import lru_cache\nfrom typing import Any, Dict, Optional, Set\n\nimport cipheydists\nimport logging\n\nfrom ciphey.iface import (\n Config,\n Distribution,\n ParamSpec,\n ResourceLoader,\n Translation,\n WordList,\n registry,\n)\n\n\n@registry.register_multi(WordList, Distribution, Translation)\nclass CipheyDists(ResourceLoader):\n # _wordlists: Set[str] = frozenset({\"english\", \"english1000\", \"englishStopWords\"})\n # _brandons: Set[str] = frozenset({\"english\"})\n # _dists: Set[str] = frozenset({\"twist\"})\n # _translates: Set[str] = frozenset({\"morse\"})\n _getters = {\n \"list\": cipheydists.get_list,\n \"dist\": cipheydists.get_dist,\n \"brandon\": cipheydists.get_brandon,\n \"translate\": cipheydists.get_translate,\n }\n\n", "metadata": {"chunk_id": "doc_18_chunk_0", "original_index": 0, "pid": "bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2_0"}, "type": "Document"} +{"page_content": " def whatResources(self) -> Optional[Set[str]]:\n pass\n\n @lru_cache()\n def getResource(self, name: str) -> Any:\n logging.debug(f\"Loading cipheydists resource {name}\")\n prefix, name = name.split(\"::\", 1)\n return self._getters[prefix](name)\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n", "metadata": {"chunk_id": "doc_18_chunk_1", "original_index": 1, "pid": "bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2_1"}, "type": "Document"} +{"page_content": "# Translated to Python and adapted for Ciphey from the JS original at https://github.com/pshihn/base69\n\n\nimport re\nfrom math import ceil\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, WordList, registry\n\n\n@registry.register\nclass Base69(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base69 decoding\n \"\"\"\n # Remove whitespace\n try:\n ctext = re.sub(r\"\\s+\", \"\", ctext, flags=re.UNICODE)\n extra_bytes = 0\n clen = len(ctext)\n\n if ctext[:-1] == \"=\":\n extra_bytes = int(ctext[clen - 2])\n\n CHUNK_COUNT = ceil(clen / 16)\n result = [0 for _ in range(CHUNK_COUNT * 7 - extra_bytes)]\n\n", "metadata": {"chunk_id": "doc_19_chunk_0", "original_index": 0, "pid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_0"}, "type": "Document"} +{"page_content": " for i in range(CHUNK_COUNT):\n chunk_string = ctext[i * 16 : (i + 1) * 16]\n if 
extra_bytes and (i == CHUNK_COUNT - 1):\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert[0 : 7 - extra_bytes]):\n result[n + i * 7] = elem\n else:\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert):\n result[n + i * 7] = elem % 256\n return bytearray(result).decode().strip(\"\\x00\")\n except Exception:\n return None\n\n", "metadata": {"chunk_id": "doc_19_chunk_1", "original_index": 1, "pid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_1"}, "type": "Document"} +{"page_content": " def decode_chunk(self, s: str):\n padded_bytes = s.endswith(\"=\")\n\n decoded = [0 for _ in range(8)]\n for i in range(8):\n decoded[i] = (\n 0\n if i == 7 and padded_bytes\n else self.chars_to_byte(s[i * 2 : i * 2 + 2])\n )\n\n result = [0 for _ in range(7)]\n for i in range(7):\n t1 = decoded[i] << (i + 1)\n t2 = decoded[i + 1] >> (7 - i - 1)\n result[i] = t1 | t2\n return result\n\n", "metadata": {"chunk_id": "doc_19_chunk_2", "original_index": 2, "pid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_2"}, "type": "Document"} +{"page_content": " def chars_to_byte(self, s: str):\n return (69 * self.CHARS.index(s[1])) + (self.CHARS.index(s[0]))\n\n @staticmethod\n def priority() -> float:\n # If this becomes lower or equal to the reverse, it breaks.\n # So I'll set it to 0.2 for now since it is very fast anyways.\n return 0.2\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.CHARS = config.get_resource(self._params()[\"dict\"], WordList)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The charset used for the decoder.\",\n req=False,\n default=\"cipheydists::list::base69\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"base69\"\n", "metadata": {"chunk_id": "doc_19_chunk_3", "original_index": 3, "pid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_3"}, "type": "Document"} +{"page_content": "import pytest\n\nfrom ciphey import decrypt\nfrom ciphey.iface import Config\n\nanswer_str = \"Hello my name is bee and I like dog and apple and tree\"\n\n\ndef test_a1z26():\n res = decrypt(\n Config().library_default().complete_config(),\n \"8 5 12 12 15 13 25 14 1 13 5 9 19 2 5 5 1 14 4 9 12 9 11 5 4 15 7 1 14 4 1 16 16 12 5 1 14 4 20 18 5 5\",\n )\n assert res == \"hellomynameisbeeandilikedogandappleandtree\"\n\n", "metadata": {"chunk_id": "doc_20_chunk_0", "original_index": 0, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_0"}, "type": "Document"} +{"page_content": "\ndef test_affine():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Ihsst bf kxbh rd ghh xky R srjh ytz xky xccsh xky muhh\",\n )\n assert res == answer_str\n\n\ndef test_ascii_shift():\n res = decrypt(\n Config().library_default().complete_config(),\n '\"?FFIzGSzH;G?zCMzz#zFCE?z>IAz;H>z;JJF?z;H>zNL??',\n )\n assert res == answer_str\n\n\ndef test_atbash():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Svool nb mznv rh yvv zmw R orpv wlt zmw zkkov zmw givv\",\n )\n assert res == answer_str\n\n", "metadata": {"chunk_id": "doc_20_chunk_1", "original_index": 1, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_1"}, "type": "Document"} +{"page_content": "\ndef test_baconian_complete_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABB ABABB ABBBA ABBAA BBAAA ABBAB AAAAA ABBAA AABAA ABAAA BAABA AAAAB AABAA 
AABAA AAAAA ABBAB AAABB ABAAA ABABB ABAAA ABABA AABAA AAABB ABBBA AABBA AAAAA ABBAB AAABB AAAAA ABBBB ABBBB ABABB AABAA AAAAA ABBAB AAABB BAABB BAAAB AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n", "metadata": {"chunk_id": "doc_20_chunk_2", "original_index": 2, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_2"}, "type": "Document"} +{"page_content": "\ndef test_baconian_standard_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABA ABABA ABBAB ABABB BABBA ABBAA AAAAA ABABB AABAA ABAAA BAAAB AAAAB AABAA AABAA AAAAA ABBAA AAABB ABAAA ABABA ABAAA ABAAB AABAA AAABB ABBAB AABBA AAAAA ABBAA AAABB AAAAA ABBBA ABBBA ABABA AABAA AAAAA ABBAA AAABB BAABA BAAAA AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n", "metadata": {"chunk_id": "doc_20_chunk_3", "original_index": 3, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_3"}, "type": "Document"} +{"page_content": "\ndef test_base32():\n res = decrypt(\n Config().library_default().complete_config(),\n \"JBSWY3DPEBWXSIDOMFWWKIDJOMQGEZLFEBQW4ZBAJEQGY2LLMUQGI33HEBQW4ZBAMFYHA3DFEBQW4ZBAORZGKZI=\",\n )\n assert res == answer_str\n\n\ndef test_base58_bitcoin():\n res = decrypt(\n Config().library_default().complete_config(),\n \"6qYhNwsP46Mn4gy6gyANfsMm2icAxGFA6gnFjVm9phYHeby7PZm3vthiXxSU77teQgTFGbHETn\",\n )\n assert res == answer_str\n\n\ndef test_base58_ripple():\n res = decrypt(\n Config().library_default().complete_config(),\n \"aqY64A1PhaM8hgyagyw4C1Mmp5cwxGEwag8EjVm9F6YHebyfPZmsvt65XxS7ffteQgTEGbHNT8\",\n )\n assert res == answer_str\n\n", "metadata": {"chunk_id": "doc_20_chunk_4", "original_index": 4, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_4"}, "type": "Document"} +{"page_content": "\ndef test_base62():\n res = decrypt(\n Config().library_default().complete_config(),\n \"2mQvnz9Yevvb7DRCuyDltsP31vJLToR5pjE9orWkzHMUsht2kbC96PLbZ1sdIocsGHENrzC2n\",\n )\n assert res == answer_str\n\n\ndef test_base64():\n res = decrypt(\n Config().library_default().complete_config(),\n \"SGVsbG8gbXkgbmFtZSBpcyBiZWUgYW5kIEkgbGlrZSBkb2cgYW5kIGFwcGxlIGFuZCB0cmVl\",\n )\n\n assert res == answer_str\n\n", "metadata": {"chunk_id": "doc_20_chunk_5", "original_index": 5, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_5"}, "type": "Document"} +{"page_content": "\ndef test_base69():\n res = decrypt(\n Config().library_default().complete_config(),\n \"kAZAtABBeB8A-AoB8ADBNAhBLA1AFBgA0AXBfBGATAVAFBgAwAWBHBu7ARt\",\n )\n assert res == answer_str\n\n\ndef test_base91():\n res = decrypt(\n Config().library_default().complete_config(),\n \">OwJh>=/fV@$x88j9ZNKB*ge$yV%lE%ZKi,+<]>-.-[+>-----<]>++.+++++++..+++.+[+>++<]>.[++>+<]>---.--[+++>-<]>.-[+>++++<]>.[++>+<]>--.-[+++>++<]>-.+[-->---<]>.--------.[+++++>+<]>+.-[+++>--<]>-.++++++++++.---[+>++<]>.[+++>-<]>++.+++..[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.-[+++>+<]>--.-[+>----<]>.++[+++>--<]>.---.++.------.[+++++>+<]>+.+[+>---<]>+.+++++++++++.--------.-[+++>-<]>--.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[+++>-<]>+.-[-->---<]>..----.-------.[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[++>+<]>++++.--.-------------..\",\n )\n assert res == answer_str\n\n", "metadata": {"chunk_id": "doc_20_chunk_10", "original_index": 10, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_10"}, "type": "Document"} +{"page_content": "\ndef test_brandon():\n res = 
decrypt(\n Config().library_default().complete_config(),\n \"R hvv blf tzgsvi yvuliv nv...sfmtib...gviirurvw... Xofgxsrmt blfi yzyvh gl blfi yivzhg. Vnkvili Vnsbi srh nzixsvw srh ovtrlmh rmgl lfi ozmwh... Ozrw hrvtv gl vevib uligivhh uiln sviv gl gsv Yofv Nlfmgzrmh. Izyrw zmw izevmlfh, sv yrgvh zmw yrgvh zdzb. Nvm lu gsv Mligs, blf hgzmw zg gsv kivxrkrxv. Blfi prmth szev uzrovw blf, hl mld blf gfim gl gsv tlwh! Zmw bvg blf wl mlg kovzw? Blf wl mlg pmvvo gl wfhg blfi svzwh drgs zhs? Rmhgvzw blf dzro, Dsb szev gsv tlwh ulihzpvm fh? Dv nfhg ollp rmgl gsv girzoh dv uzrovw olmt ztl! Rm z grnv kzhhvw, ", "metadata": {"chunk_id": "doc_20_chunk_11", "original_index": 11, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_11"}, "type": "Document"} +{"page_content": "lfi dliow rmgvigdrmvw drgs zmlgsvi gsilfts zm fksvzezo hxslozih xzoo gsv Xlmqfmxgrlm lu gsv Hksvivh... Gsv tlwh zooldvw fmslob ulixvh gl hork rmgl lfi wlnzrm. Gsv luuhkirmt lu gszg xzgzxobhn dzh gsv mvuvirlfh ulixv xzoovw nztrx... Bvg dv wrw mlg yzmrhs rg, rmhgvzw hgfwbrmt gsv erov zixzmv uli lfi kldvi zmw dvzogs! Zmw gsv nlmhgvih zg lfi wlli...gsv fmslob ivorxgh lu gsrh Xlmqfmxgrlm? ...gsv gilooh...gsv xlikhv vzgvih...gsv dvivdloevh? Wrw dv izrhv lfi hdliwh ztzrmhg gsvn? Li szev dv ozrw gsrh yfiwvm lm lgsvih? Lm hl-xzoovw drgxsvih? Hgizb xsrowivm gzftsg gsv dzbh lu ulfo hlixvib, ", "metadata": {"chunk_id": "doc_20_chunk_12", "original_index": 12, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_12"}, "type": "Document"} +{"page_content": "gsvri ylwrvh nfgzgvw gsilfts yozhksvnlfh irgfzo. Hvmg gl urtsg nlmhgvih gslfts gsvb xlfow mlg wrhgrmtfrhs tllw uiln vero. Gsv uorxpvi lu sfnzmrgb olmt vcgrmtfrhsvw drgsrm gsvn. Bvh, gsvri mfnyvih szev wdrmwovw gsilfts gsv bvzih. Yfg z uvd hgroo ilzn lfi ozmwh, luuvirmt gsvri yollwb dlip uli xlrm. Gl gsrh wzb gsvb hsznv fh drgs gsvri evib vcrhgvmxv! Gsv Mligs yovvwh, uolttvw yb dzi. Gsv yzggovh ziv gsv tlwh' dsrk, xszhgrhvnvmg uli lfi hrmh! Zmw ovg fh mlg ulitvg gsv gviilih, gsv hxlfitvh uiln yvblmw lfi dliow! Gsv Drow Sfmg irwvh gsv hpb drgs vevib ufoo nllm! Gsv wzip izrwvih zywfxg lfi xsrowivm rmgl ozmwh fmpmldm! Hlnv hzb gsvb svizow z hvxlmw Xlmqfmxgrlm! Xzm dv xszig z xlfihv yzxp rmgl gsv ortsg? Droo dv urmw gsv hgivmtgs gl yzmrhs gsv nztvh uiln lfi prmtwlnh? Fmrgv zilfmw gsv dzings lu gsv Vgvimzo Uriv? Mrts rh gsv Grnv lu gsv Hdliw zmw gsv Zcv! Mlmv droo urtsg gsrh dzi rm lfi hgvzw! 
Mrts rh gsv Grnv lu Nzwmvhh zmw Wrhwzrm!\",\n )\n assert bool(res) is True\n\n", "metadata": {"chunk_id": "doc_20_chunk_13", "original_index": 13, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_13"}, "type": "Document"} +{"page_content": "\ndef test_caesar():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Uryyb zl anzr vf orr naq V yvxr qbt naq nccyr naq gerr\",\n )\n assert res == answer_str\n\n\ndef test_decimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"72 101 108 108 111 32 109 121 32 110 97 109 101 32 105 115 32 98 101 101 32 97 110 100 32 73 32 108 105 107 101 32 100 111 103 32 97 110 100 32 97 112 112 108 101 32 97 110 100 32 116 114 101 101\",\n )\n assert res == answer_str\n\n\ndef test_dna():\n res = decrypt(\n Config().library_default().complete_config(),\n \"GAT AAT GCT ATT TCT ATT AAT ACT GAA CGT GAA TCT ACT ATT AAT GGT\",\n )\n assert res == \"DNAISINTERESTING\"\n\n\ndef test_dtmf():\n res = decrypt(\n Config().library_default().complete_config(),\n", "metadata": {"chunk_id": "doc_20_chunk_14", "original_index": 14, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_14"}, "type": "Document"} +{"page_content": " \"1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 
1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697\",", "metadata": {"chunk_id": "doc_20_chunk_15", "original_index": 15, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_15"}, "type": "Document"} +{"page_content": "\n )\n assert res == answer_str\n\n\ndef test_galactic():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᒷꖎꖎ𝙹 ᒲ|| リᔑᒲᒷ ╎ᓭ ʖᒷᒷ ᔑリ↸ i ꖎ╎ꖌᒷ ↸𝙹⊣ ᔑリ↸ ᔑ!¡!¡ꖎᒷ ᔑリ↸ ℸ ̣ ∷ᒷᒷ\",\n )\n assert res == answer_str.lower()\n\n\ndef test_galactic_Xproblem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᔑꖎ╎⎓ᔑ ̇/, ̇/||ꖎ𝙹!¡⍑𝙹リᒷ, ᔑ ̇/ ᔑꖎ𝙹リᒷ ᔑリ↸ ̇/ᒷ∷𝙹 ̇/ ⎓∷𝙹ᒲ 𝙹 ̇/⎓𝙹∷↸\",\n )\n assert res == \"halifax, xylophone, a x alone and xerox from oxford\"\n\n\ndef test_gzip():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H4sIAAzul18A/yXJzQmAMBSEwVa+ckwZT7LIw80P6sXuA3ocZpM9aC89msibXSJ6peA8RR3Hx5jTfzyXtAAbQvCyNgAAAA==\",\n )\n assert res == answer_str\n\n", "metadata": {"chunk_id": "doc_20_chunk_16", "original_index": 16, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_16"}, "type": "Document"} +{"page_content": "\ndef test_hexadecimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"48 65 6c 6c 6f 20 6d 79 20 6e 61 6d 65 20 69 73 20 62 65 65 20 61 6e 64 20 49 20 6c 69 6b 65 20 64 6f 67 20 61 6e 64 20 61 70 70 6c 65 20 61 6e 64 20 74 72 65 65\",\n )\n\n assert res == answer_str\n\n\ndef test_json_problem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"0110100001100101011011000110110001101111\",\n )\n assert res != \"0110100001100101011011000110110001101111\"\n\n\ndef test_leetspeak():\n res = decrypt(\n Config().library_default().complete_config(),\n \"|-|3ll0 my n4m3 1s 833 4nd 1 l1k3 D06 4ND 4ppl3 4nd 7R33\",\n )\n assert res.lower() == answer_str.lower()\n\n", "metadata": {"chunk_id": "doc_20_chunk_17", "original_index": 17, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_17"}, "type": "Document"} +{"page_content": 
"\ndef test_morse_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \".... . .-.. .-.. ---/-- -.--/-. .- -- ./.. .../-... . ./.- -. -../../.-.. .. -.- ./-.. --- --./.- -. -../.- .--. .--. .-.. ./.- -. -../- .-. . .\",\n )\n assert res == answer_str.upper()\n\n\ndef test_multi_tap():\n res = decrypt(\n Config().library_default().complete_config(),\n \"44 33 555 555 666 0 6 999 0 66 2 6 33 0 444 7777 0 22 33 33 0 2 66 3 0 444 0 555 444 55 33 0 3 666 4 0 2 66 3 0 2 7 7 555 33 0 2 66 3 0 8 777 33 33\",\n )\n assert res == answer_str.upper()\n\n", "metadata": {"chunk_id": "doc_20_chunk_18", "original_index": 18, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_18"}, "type": "Document"} +{"page_content": "\ndef test_new_line_at_start_returns():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"\\npass\\n\")\n\n assert res == \"\\npass\\n\"\n\n\ndef test_new_line_strip_and_return():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"pass\\n\")\n\n assert res == \"pass\\n\"\n\n\ndef test_octal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"110 145 154 154 157 40 155 171 40 156 141 155 145 40 151 163 40 142 145 145 40 141 156 144 40 111 40 154 151 153 145 40 144 157 147 40 141 156 144 40 141 160 160 154 145 40 141 156 144 40 164 162 145 145\",\n )\n assert res == answer_str\n\n", "metadata": {"chunk_id": "doc_20_chunk_19", "original_index": 19, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_19"}, "type": "Document"} +{"page_content": "\ndef test_plaintext():\n res = decrypt(Config().library_default().complete_config(), answer_str)\n assert res == answer_str\n\n\ndef test_quadgrams_messed_up_spacing():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\",\n )\n assert (\n res == \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\"\n )\n\n\ndef test_quadgrams_no_spaces():\n res = decrypt(\n Config().library_default().complete_config(),\n \"HellomynameisbeeandIlikedogandappleandtree\",\n )\n assert res == \"HellomynameisbeeandIlikedogandappleandtree\"\n\n", "metadata": {"chunk_id": "doc_20_chunk_20", "original_index": 20, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_20"}, "type": "Document"} +{"page_content": "\ndef test_quadgrams_space_between_every_letter():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\",\n )\n assert (\n res\n == \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\"\n )\n\n\ndef test_reversed_text():\n res = decrypt(\n Config().library_default().complete_config(),\n \"eert dna elppa dna god ekil I dna eeb si eman ym olleH\",\n )\n assert res == answer_str\n\n", "metadata": {"chunk_id": "doc_20_chunk_21", "original_index": 21, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_21"}, "type": "Document"} +{"page_content": "\ndef test_rot47():\n res = decrypt(\n Config().library_default().complete_config(),\n \"$A9:?I @7 3=24< BF2CEK[ ;F586 >J G@H\",\n )\n assert res == \"Sphinx of black 
quartz, judge my vow\"\n\n\ndef test_soundex():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H236 I200 I500 T000 P230\",\n )\n assert res.lower() == \"history is in the past\"\n\n\ndef test_tap_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \"4,4 1,5 4,3 4,4 3,4 3,3 1,5 4,4 5,2 3,4 4,4 2,3 4,2 1,5 1,5\",\n )\n assert res == \"test one two three\".upper()\n\n\ndef test_url():\n res = decrypt(\n Config().library_default().complete_config(),\n \"https%3A%2F%2Fwww%2Egoogle%2Ecom%2Fsearch%3Fq%3Dciphey\",\n )\n assert res == \"https://www.google.com/search?q=ciphey\"\n\n", "metadata": {"chunk_id": "doc_20_chunk_22", "original_index": 22, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_22"}, "type": "Document"} +{"page_content": "\ndef test_uuencode():\n res = decrypt(\n Config().library_default().complete_config(),\n 'begin 644 /dev/stdout\nM2&5L;&\\@;7D@;F%M92!I\n#include <algorithm>\n#include <cmath>\n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector<uint64_t> MakeNumbers() {\n return std::vector<uint64_t> {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector<uint8_t> MakeBools() {\n return std::vector<uint8_t> {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector<std::string> MakeFixedStrings(size_t string_size) {\n std::vector<std::string> result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\n", "metadata": {"chunk_id": "doc_21_chunk_0", "original_index": 0, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_0"}, "type": "Document"} +{"page_content": "std::vector<std::string> MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n", "metadata": {"chunk_id": "doc_21_chunk_1", "original_index": 1, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_1"}, "type": "Document"} +{"page_content": " \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. 
\"\n };\n}\n\n", "metadata": {"chunk_id": "doc_21_chunk_2", "original_index": 2, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_2"}, "type": "Document"} +{"page_content": "std::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n", "metadata": {"chunk_id": "doc_21_chunk_3", "original_index": 3, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_3"}, "type": "Document"} +{"page_content": " // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n", "metadata": {"chunk_id": "doc_21_chunk_4", "original_index": 4, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_4"}, "type": "Document"} +{"page_content": " // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\n", "metadata": {"chunk_id": "doc_21_chunk_5", "original_index": 5, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_5"}, "type": "Document"} +{"page_content": "std::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n result.reserve(vals.size());\n\n", "metadata": {"chunk_id": "doc_21_chunk_6", "original_index": 6, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_6"}, "type": "Document"} +{"page_content": " std::transform(vals.begin(), vals.end(), std::back_inserter(result), [scale_multiplier, rhs_value](const auto& value) {\n return value * 
scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\n", "metadata": {"chunk_id": "doc_21_chunk_7", "original_index": 7, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_7"}, "type": "Document"} +{"page_content": "std::vector<in_addr> MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector<in6_addr> MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n", "metadata": {"chunk_id": "doc_21_chunk_8", "original_index": 8, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_8"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include <memory>\n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector<ColumnRef>& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "metadata": {"chunk_id": "doc_22_chunk_0", "original_index": 0, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_0"}, "type": "Document"} +{"page_content": " /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n", "metadata": {"chunk_id": "doc_22_chunk_1", "original_index": 1, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_1"}, "type": "Document"} +{"page_content": "private:\n std::vector<ColumnRef> columns_;\n};\n\ntemplate <typename... Columns>\nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple<std::shared_ptr<Columns>...>;\n\n using ValueType = std::tuple<std::decay_t<decltype(std::declval<Columns>().At(0))>...>;\n\n ColumnTupleT(std::tuple<std::shared_ptr<Columns>...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n", "metadata": {"chunk_id": "doc_22_chunk_2", "original_index": 2, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_2"}, "type": "Document"} +{"page_content": " ColumnTupleT(std::vector<ColumnRef> columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const 
std::initializer_list<ColumnRef> columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template <typename... T>\n inline void Append(std::tuple<T...> value) {\n AppendTuple(std::move(value));\n }\n\n", "metadata": {"chunk_id": "doc_22_chunk_3", "original_index": 3, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_3"}, "type": "Document"} +{"page_content": " /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v<TupleOfColumns>) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared<ColumnTupleT<Columns...>>(VectorToTuple(std::move(col)));\n }\n\n", "metadata": {"chunk_id": "doc_22_chunk_4", "original_index": 4, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_4"}, "type": "Document"} +{"page_content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast<ColumnTuple&&>(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict<ColumnTuple>())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast<ColumnTupleT<Columns...>&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\n", "metadata": {"chunk_id": "doc_22_chunk_5", "original_index": 5, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_5"}, "type": "Document"} +{"page_content": "private:\n template <size_t index = std::tuple_size_v<ValueType>, typename T = ValueType>\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v<TupleOfColumns>);\n static_assert(std::tuple_size_v<TupleOfColumns> == std::tuple_size_v<T>);\n if constexpr (index == 0) {\n return;\n } else {\n std::get<index - 1>(typed_columns_)->Append(std::move(std::get<index - 1>(value)));\n AppendTuple<index - 1>(std::move(value));\n }\n }\n\n", "metadata": {"chunk_id": "doc_22_chunk_6", "original_index": 6, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_6"}, "type": "Document"} +{"page_content": " template <size_t index = std::tuple_size_v<TupleOfColumns>, typename T = TupleOfColumns>\n inline static std::vector<ColumnRef> TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v<TupleOfColumns>);\n if constexpr (index == 0) {\n std::vector<ColumnRef> result;\n result.reserve(std::tuple_size_v<TupleOfColumns>);\n return result;\n } else {\n auto result = TupleToVector<index - 1>(value);\n result.push_back(std::get<index - 1>(value));\n return result;\n }\n }\n\n", "metadata": {"chunk_id": "doc_22_chunk_7", "original_index": 7, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_7"}, "type": "Document"} +{"page_content": " template <size_t column_index = std::tuple_size_v<TupleOfColumns>, typename T = std::vector<ColumnRef>>\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v<TupleOfColumns>);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element<column_index - 1, TupleOfColumns>::type::element_type;\n auto 
column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n", "metadata": {"chunk_id": "doc_22_chunk_8", "original_index": 8, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_8"}, "type": "Document"} +{"page_content": " template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", "metadata": {"chunk_id": "doc_22_chunk_9", "original_index": 9, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_9"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n", "metadata": {"chunk_id": "doc_23_chunk_0", "original_index": 0, "pid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_0"}, "type": "Document"} +{"page_content": " /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "metadata": {"chunk_id": "doc_23_chunk_1", "original_index": 1, "pid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_1"}, "type": "Document"} +{"page_content": " /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "metadata": {"chunk_id": "doc_23_chunk_2", "original_index": 2, "pid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_2"}, "type": "Document"} +{"page_content": "#include \"type_parser.h\"\n\n#include \"clickhouse/exceptions.h\"\n#include \"clickhouse/base/platform.h\" // for _win_\n\n#include \n#include \n#include \n#include \n#include \n\n#if defined _win_\n#include \n#else\n#include \n#endif\n\n\nnamespace clickhouse {\n\nbool TypeAst::operator==(const TypeAst & other) const {\n 
return meta == other.meta\n && code == other.code\n && name == other.name\n && value == other.value\n && std::equal(elements.begin(), elements.end(), other.elements.begin(), other.elements.end());\n}\n\n", "metadata": {"chunk_id": "doc_24_chunk_0", "original_index": 0, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_0"}, "type": "Document"} +{"page_content": "static const std::unordered_map kTypeCode = {\n { \"Void\", Type::Void },\n { \"Int8\", Type::Int8 },\n { \"Int16\", Type::Int16 },\n { \"Int32\", Type::Int32 },\n { \"Int64\", Type::Int64 },\n { \"Bool\", Type::UInt8 },\n { \"UInt8\", Type::UInt8 },\n { \"UInt16\", Type::UInt16 },\n { \"UInt32\", Type::UInt32 },\n { \"UInt64\", Type::UInt64 },\n { \"Float32\", Type::Float32 },\n { \"Float64\", Type::Float64 },\n { \"String\", Type::String },\n { \"FixedString\", Type::FixedString },\n { \"DateTime\", Type::DateTime },\n { \"DateTime64\", Type::DateTime64 },\n", "metadata": {"chunk_id": "doc_24_chunk_1", "original_index": 1, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_1"}, "type": "Document"} +{"page_content": " { \"Date\", Type::Date },\n { \"Date32\", Type::Date32 },\n { \"Array\", Type::Array },\n { \"Nullable\", Type::Nullable },\n { \"Tuple\", Type::Tuple },\n { \"Enum8\", Type::Enum8 },\n { \"Enum16\", Type::Enum16 },\n { \"UUID\", Type::UUID },\n { \"IPv4\", Type::IPv4 },\n { \"IPv6\", Type::IPv6 },\n { \"Int128\", Type::Int128 },\n// { \"UInt128\", Type::UInt128 },\n { \"Decimal\", Type::Decimal },\n { \"Decimal32\", Type::Decimal32 },\n { \"Decimal64\", Type::Decimal64 },\n { \"Decimal128\", Type::Decimal128 },\n { \"LowCardinality\", Type::LowCardinality },\n { \"Map\", Type::Map },\n { \"Point\", Type::Point },\n { \"Ring\", Type::Ring },\n { \"Polygon\", Type::Polygon },\n { \"MultiPolygon\", Type::MultiPolygon },\n};\n\n", "metadata": {"chunk_id": "doc_24_chunk_2", "original_index": 2, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_2"}, "type": "Document"} +{"page_content": "template \ninline int CompateStringsCaseInsensitive(const L& left, const R& right) {\n int64_t size_diff = left.size() - right.size();\n if (size_diff != 0)\n return size_diff > 0 ? 
1 : -1;\n\n#if defined _win_\n return _strnicmp(left.data(), right.data(), left.size());\n#else\n return strncasecmp(left.data(), right.data(), left.size());\n#endif\n}\n\nstatic Type::Code GetTypeCode(const std::string& name) {\n auto it = kTypeCode.find(name);\n if (it != kTypeCode.end()) {\n return it->second;\n }\n\n return Type::Void;\n}\n\nstatic TypeAst::Meta GetTypeMeta(const StringView& name) {\n if (name == \"Array\") {\n return TypeAst::Array;\n }\n\n", "metadata": {"chunk_id": "doc_24_chunk_3", "original_index": 3, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_3"}, "type": "Document"} +{"page_content": " if (name == \"Null\") {\n return TypeAst::Null;\n }\n\n if (name == \"Nullable\") {\n return TypeAst::Nullable;\n }\n\n if (name == \"Tuple\") {\n return TypeAst::Tuple;\n }\n\n if (name == \"Enum8\" || name == \"Enum16\") {\n return TypeAst::Enum;\n }\n\n if (name == \"LowCardinality\") {\n return TypeAst::LowCardinality;\n }\n\n if (name == \"SimpleAggregateFunction\") {\n return TypeAst::SimpleAggregateFunction;\n }\n\n if (name == \"Map\") {\n return TypeAst::Map;\n }\n\n return TypeAst::Terminal;\n}\n\n", "metadata": {"chunk_id": "doc_24_chunk_4", "original_index": 4, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_4"}, "type": "Document"} +{"page_content": "bool ValidateAST(const TypeAst& ast) {\n // Void terminal that is not actually \"void\" produced when unknown type is encountered.\n if (ast.meta == TypeAst::Terminal\n && ast.code == Type::Void\n && CompateStringsCaseInsensitive(ast.name, std::string_view(\"void\")) != 0)\n //throw UnimplementedError(\"Unsupported type: \" + ast.name);\n return false;\n\n return true;\n}\n\n\nTypeParser::TypeParser(const StringView& name)\n : cur_(name.data())\n , end_(name.data() + name.size())\n , type_(nullptr)\n{\n}\n\nTypeParser::~TypeParser() = default;\n\nbool TypeParser::Parse(TypeAst* type) {\n type_ = type;\n open_elements_.push(type_);\n\n", "metadata": {"chunk_id": "doc_24_chunk_5", "original_index": 5, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_5"}, "type": "Document"} +{"page_content": " size_t processed_tokens = 0;\n do {\n const Token & token = NextToken();\n switch (token.type) {\n case Token::QuotedString:\n {\n type_->meta = TypeAst::Terminal;\n if (token.value.length() < 1)\n type_->value_string = {};\n else\n type_->value_string = token.value.substr(1, token.value.length() - 2).to_string();\n", "metadata": {"chunk_id": "doc_24_chunk_6", "original_index": 6, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_6"}, "type": "Document"} +{"page_content": " type_->code = Type::String;\n break;\n }\n case Token::Name:\n type_->meta = GetTypeMeta(token.value);\n type_->name = token.value.to_string();\n type_->code = GetTypeCode(type_->name);\n break;\n case Token::Number:\n type_->meta = TypeAst::Number;\n type_->value = std::stol(token.value.to_string());\n break;\n case Token::String:\n type_->meta = TypeAst::String;\n type_->value_string = std::string(token.value);\n", "metadata": {"chunk_id": "doc_24_chunk_7", "original_index": 7, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_7"}, "type": "Document"} +{"page_content": " break;\n case Token::LPar:\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::RPar:\n type_ = open_elements_.top();\n open_elements_.pop();\n break;\n case Token::Assign:\n case Token::Comma:\n 
type_ = open_elements_.top();\n open_elements_.pop();\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::EOS:\n {\n // Ubalanced braces, brackets, etc is an error.\n if (open_elements_.size() != 1)\n return false;\n\n", "metadata": {"chunk_id": "doc_24_chunk_8", "original_index": 8, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_8"}, "type": "Document"} +{"page_content": " // Empty input string, no tokens produced\n if (processed_tokens == 0)\n return false;\n\n return ValidateAST(*type);\n }\n case Token::Invalid:\n return false;\n }\n ++processed_tokens;\n } while (true);\n}\n\nTypeParser::Token TypeParser::NextToken() {\n for (; cur_ < end_; ++cur_) {\n switch (*cur_) {\n case ' ':\n case '\\n':\n case '\\t':\n case '\\0':\n continue;\n case '=':\n return Token{Token::Assign, StringView(cur_++, 1)};\n case '(':\n return Token{Token::LPar, StringView(cur_++, 1)};\n case ')':\n return Token{Token::RPar, StringView(cur_++, 1)};\n", "metadata": {"chunk_id": "doc_24_chunk_9", "original_index": 9, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_9"}, "type": "Document"} +{"page_content": " case ',':\n return Token{Token::Comma, StringView(cur_++, 1)};\n case '\\'':\n {\n const auto end_quote_length = 1;\n const StringView end_quote{cur_, end_quote_length};\n // Fast forward to the closing quote.\n const auto start = cur_++;\n for (; cur_ < end_ - end_quote_length; ++cur_) {\n // TODO (nemkov): handle escaping ?\n if (end_quote == StringView{cur_, end_quote_length}) {\n cur_ += end_quote_length;\n\n", "metadata": {"chunk_id": "doc_24_chunk_10", "original_index": 10, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_10"}, "type": "Document"} +{"page_content": " return Token{Token::QuotedString, StringView{start, cur_}};\n }\n }\n return Token{Token::QuotedString, StringView(cur_++, 1)};\n }\n\n default: {\n const char* st = cur_;\n\n if (*cur_ == '\\'') {\n for (st = ++cur_; cur_ < end_; ++cur_) {\n if (*cur_ == '\\'') {\n return Token{Token::String, StringView(st, cur_++ - st)};\n }\n }\n\n", "metadata": {"chunk_id": "doc_24_chunk_11", "original_index": 11, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_11"}, "type": "Document"} +{"page_content": " return Token{Token::Invalid, StringView()};\n }\n\n if (isalpha(*cur_) || *cur_ == '_') {\n for (; cur_ < end_; ++cur_) {\n if (!isalpha(*cur_) && !isdigit(*cur_) && *cur_ != '_') {\n break;\n }\n }\n\n return Token{Token::Name, StringView(st, cur_)};\n }\n\n if (isdigit(*cur_) || *cur_ == '-') {\n for (++cur_; cur_ < end_; ++cur_) {\n if (!isdigit(*cur_)) {\n break;\n }\n }\n\n", "metadata": {"chunk_id": "doc_24_chunk_12", "original_index": 12, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_12"}, "type": "Document"} +{"page_content": " return Token{Token::Number, StringView(st, cur_)};\n }\n\n return Token{Token::Invalid, StringView()};\n }\n }\n }\n\n return Token{Token::EOS, StringView()};\n}\n\n\nconst TypeAst* ParseTypeName(const std::string& type_name) {\n // Cache for type_name.\n // Usually we won't have too many type names in the cache, so do not try to\n // limit cache size.\n static std::map ast_cache;\n static std::mutex lock;\n\n std::lock_guard guard(lock);\n auto it = ast_cache.find(type_name);\n if (it != ast_cache.end()) {\n return &it->second;\n }\n\n auto& ast = ast_cache[type_name];\n if (TypeParser(type_name).Parse(&ast)) {\n 
return *\n }\n ast_cache.erase(type_name);\n return nullptr;\n}\n\n}\n", "metadata": {"chunk_id": "doc_24_chunk_13", "original_index": 13, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_13"}, "type": "Document"} +{"page_content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n// * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n// * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n", "metadata": {"chunk_id": "doc_25_chunk_0", "original_index": 0, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_0"}, "type": "Document"} +{"page_content": "// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n", "metadata": {"chunk_id": "doc_25_chunk_1", "original_index": 1, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_1"}, "type": "Document"} +{"page_content": "//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the public API for death tests. It is\n// #included by gtest.h so a user doesn't need to include this\n// directly.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n#define GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n\n#include \"gtest/internal/gtest-death-test-internal.h\"\n\nnamespace testing {\n\n// This flag controls the style of death tests. Valid values are \"threadsafe\",\n// meaning that the death test child process will re-execute the test binary\n// from the start, running only a single death test, or \"fast\",\n// meaning that the child process will execute the test logic immediately\n// after forking.\nGTEST_DECLARE_string_(death_test_style);\n\n#if GTEST_HAS_DEATH_TEST\n\nnamespace internal {\n\n", "metadata": {"chunk_id": "doc_25_chunk_2", "original_index": 2, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_2"}, "type": "Document"} +{"page_content": "// Returns a Boolean value indicating whether the caller is currently\n// executing in the context of the death test child process. Tools such as\n// Valgrind heap checkers may need this to modify their behavior in death\n// tests. IMPORTANT: This is an internal utility. 
Using it may break the\n// implementation of death tests. User code MUST NOT use it.\nGTEST_API_ bool InDeathTestChild();\n\n} // namespace internal\n\n// The following macros are useful for writing death tests.\n\n// Here's what happens when an ASSERT_DEATH* or EXPECT_DEATH* is\n// executed:\n//\n// 1. It generates a warning if there is more than one active\n// thread. This is because it's safe to fork() or clone() only\n// when there is a single thread.\n//\n// 2. The parent process clone()s a sub-process and runs the death\n// test in it; the sub-process exits with code 0 at the end of the\n// death test, if it hasn't exited already.\n//\n", "metadata": {"chunk_id": "doc_25_chunk_3", "original_index": 3, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_3"}, "type": "Document"} +{"page_content": "// 3. The parent process waits for the sub-process to terminate.\n//\n// 4. The parent process checks the exit code and error message of\n// the sub-process.\n//\n// Examples:\n//\n// ASSERT_DEATH(server.SendMessage(56, \"Hello\"), \"Invalid port number\");\n// for (int i = 0; i < 5; i++) {\n// EXPECT_DEATH(server.ProcessRequest(i),\n// \"Invalid request .* in ProcessRequest()\")\n// << \"Failed to die on request \" << i;\n// }\n//\n// ASSERT_EXIT(server.ExitNow(), ::testing::ExitedWithCode(0), \"Exiting\");\n//\n// bool KilledBySIGHUP(int exit_code) {\n// return WIFSIGNALED(exit_code) && WTERMSIG(exit_code) == SIGHUP;\n// }\n//\n// ASSERT_EXIT(client.HangUpServer(), KilledBySIGHUP, \"Hanging up!\");\n//\n// The final parameter to each of these macros is a matcher applied to any data\n", "metadata": {"chunk_id": "doc_25_chunk_4", "original_index": 4, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_4"}, "type": "Document"} +{"page_content": "// the sub-process wrote to stderr. For compatibility with existing tests, a\n// bare string is interpreted as a regular expression matcher.\n//\n// On the regular expressions used in death tests:\n//\n// GOOGLETEST_CM0005 DO NOT DELETE\n// On POSIX-compliant systems (*nix), we use the library,\n// which uses the POSIX extended regex syntax.\n//\n// On other platforms (e.g. Windows or Mac), we only support a simple regex\n// syntax implemented as part of Google Test. This limited\n// implementation should be enough most of the time when writing\n// death tests; though it lacks many features you can find in PCRE\n// or POSIX extended regex syntax. For example, we don't support\n", "metadata": {"chunk_id": "doc_25_chunk_5", "original_index": 5, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_5"}, "type": "Document"} +{"page_content": "// union (\"x|y\"), grouping (\"(xy)\"), brackets (\"[xy]\"), and\n// repetition count (\"x{5,7}\"), among others.\n//\n// Below is the syntax that we do support. We chose it to be a\n// subset of both PCRE and POSIX extended regex, so it's easy to\n// learn wherever you come from. 
In the following: 'A' denotes a\n// literal character, period (.), or a single \\\\ escape sequence;\n// 'x' and 'y' denote regular expressions; 'm' and 'n' are for\n", "metadata": {"chunk_id": "doc_25_chunk_6", "original_index": 6, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_6"}, "type": "Document"} +{"page_content": "// natural numbers.\n//\n// c matches any literal character c\n// \\\\d matches any decimal digit\n// \\\\D matches any character that's not a decimal digit\n// \\\\f matches \\f\n// \\\\n matches \\n\n// \\\\r matches \\r\n// \\\\s matches any ASCII whitespace, including \\n\n// \\\\S matches any character that's not a whitespace\n// \\\\t matches \\t\n// \\\\v matches \\v\n// \\\\w matches any letter, _, or decimal digit\n// \\\\W matches any character that \\\\w doesn't match\n// \\\\c matches any literal character c, which must be a punctuation\n// . matches any single character except \\n\n", "metadata": {"chunk_id": "doc_25_chunk_7", "original_index": 7, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_7"}, "type": "Document"} +{"page_content": "// A? matches 0 or 1 occurrences of A\n// A* matches 0 or many occurrences of A\n// A+ matches 1 or many occurrences of A\n// ^ matches the beginning of a string (not that of each line)\n// $ matches the end of a string (not that of each line)\n// xy matches x followed by y\n//\n// If you accidentally use PCRE or POSIX extended regex features\n// not implemented by us, you will get a run-time failure. In that\n", "metadata": {"chunk_id": "doc_25_chunk_8", "original_index": 8, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_8"}, "type": "Document"} +{"page_content": "// case, please try to rewrite your regular expression within the\n// above syntax.\n//\n// This implementation is *not* meant to be as highly tuned or robust\n// as a compiled regex library, but should perform well enough for a\n// death test, which already incurs significant overhead by launching\n// a child process.\n//\n// Known caveats:\n//\n// A \"threadsafe\" style death test obtains the path to the test\n// program from argv[0] and re-executes it in the sub-process. For\n// simplicity, the current implementation doesn't search the PATH\n// when launching the sub-process. This means that the user must\n// invoke the test program via a path that contains at least one\n// path separator (e.g. path/to/foo_test and\n// /absolute/path/to/bar_test are fine, but foo_test is not). 
This\n// is rarely a problem as people usually don't put the test binary\n// directory in PATH.\n//\n\n", "metadata": {"chunk_id": "doc_25_chunk_9", "original_index": 9, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_9"}, "type": "Document"} +{"page_content": "// Asserts that a given `statement` causes the program to exit, with an\n// integer exit status that satisfies `predicate`, and emitting error output\n// that matches `matcher`.\n# define ASSERT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_FATAL_FAILURE_)\n\n// Like `ASSERT_EXIT`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_NONFATAL_FAILURE_)\n\n", "metadata": {"chunk_id": "doc_25_chunk_10", "original_index": 10, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_10"}, "type": "Document"} +{"page_content": "// Asserts that a given `statement` causes the program to exit, either by\n// explicitly exiting with a nonzero exit code or being killed by a\n// signal, and emitting error output that matches `matcher`.\n# define ASSERT_DEATH(statement, matcher) \\\n ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Like `ASSERT_DEATH`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_DEATH(statement, matcher) \\\n EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Two predicate classes that can be used in {ASSERT,EXPECT}_EXIT*:\n\n", "metadata": {"chunk_id": "doc_25_chunk_11", "original_index": 11, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_11"}, "type": "Document"} +{"page_content": "// Tests that an exit code describes a normal exit with a given exit code.\nclass GTEST_API_ ExitedWithCode {\n public:\n explicit ExitedWithCode(int exit_code);\n ExitedWithCode(const ExitedWithCode&) = default;\n void operator=(const ExitedWithCode& other) = delete;\n bool operator()(int exit_status) const;\n private:\n const int exit_code_;\n};\n\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n// Tests that an exit code describes an exit due to termination by a\n// given signal.\n// GOOGLETEST_CM0006 DO NOT DELETE\nclass GTEST_API_ KilledBySignal {\n public:\n explicit KilledBySignal(int signum);\n bool operator()(int exit_status) const;\n private:\n const int signum_;\n};\n# endif // !GTEST_OS_WINDOWS\n\n", "metadata": {"chunk_id": "doc_25_chunk_12", "original_index": 12, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_12"}, "type": "Document"} +{"page_content": "// EXPECT_DEBUG_DEATH asserts that the given statements die in debug mode.\n// The death testing framework causes this to have interesting semantics,\n// since the sideeffects of the call are only visible in opt mode, and not\n// in debug mode.\n//\n// In practice, this can be used to test functions that utilize the\n// LOG(DFATAL) macro using the following style:\n//\n// int DieInDebugOr12(int* sideeffect) {\n// if (sideeffect) {\n// *sideeffect = 12;\n// }\n", "metadata": {"chunk_id": "doc_25_chunk_13", "original_index": 13, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_13"}, "type": "Document"} +{"page_content": "// LOG(DFATAL) << \"death\";\n// return 12;\n// }\n//\n// TEST(TestSuite, TestDieOr12WorksInDgbAndOpt) {\n// int sideeffect = 0;\n// // Only asserts in dbg.\n// 
EXPECT_DEBUG_DEATH(DieInDebugOr12(&sideeffect), \"death\");\n//\n// #ifdef NDEBUG\n// // opt-mode has sideeffect visible.\n// EXPECT_EQ(12, sideeffect);\n// #else\n// // dbg-mode no visible sideeffect.\n// EXPECT_EQ(0, sideeffect);\n// #endif\n// }\n//\n// This will assert that DieInDebugReturn12InOpt() crashes in debug\n// mode, usually due to a DCHECK or LOG(DFATAL), but returns the\n// appropriate fallback value (12 in this case) in opt mode. If you\n// need to test that a function has appropriate side-effects in opt\n// mode, include assertions against the side-effects. A general\n// pattern for this is:\n//\n// EXPECT_DEBUG_DEATH({\n// // Side-effects here will have an effect after this statement in\n// // opt mode, but none in debug mode.\n// EXPECT_EQ(12, DieInDebugOr12(&sideeffect));\n// }, \"death\");\n//\n# ifdef NDEBUG\n\n", "metadata": {"chunk_id": "doc_25_chunk_14", "original_index": 14, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_14"}, "type": "Document"} +{"page_content": "# define EXPECT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# else\n\n# define EXPECT_DEBUG_DEATH(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n\n# endif // NDEBUG for EXPECT_DEBUG_DEATH\n#endif // GTEST_HAS_DEATH_TEST\n\n", "metadata": {"chunk_id": "doc_25_chunk_15", "original_index": 15, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_15"}, "type": "Document"} +{"page_content": "// This macro is used for implementing macros such as\n// EXPECT_DEATH_IF_SUPPORTED and ASSERT_DEATH_IF_SUPPORTED on systems where\n// death tests are not supported. Those macros must compile on such systems\n// if and only if EXPECT_DEATH and ASSERT_DEATH compile with the same parameters\n// on systems that support death tests. This allows one to write such a macro on\n// a system that does not support death tests and be sure that it will compile\n// on a death-test supporting system. It is exposed publicly so that systems\n// that have death-tests with stricter requirements than GTEST_HAS_DEATH_TEST\n", "metadata": {"chunk_id": "doc_25_chunk_16", "original_index": 16, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_16"}, "type": "Document"} +{"page_content": "// can write their own equivalent of EXPECT_DEATH_IF_SUPPORTED and\n// ASSERT_DEATH_IF_SUPPORTED.\n//\n// Parameters:\n// statement - A statement that a macro such as EXPECT_DEATH would test\n// for program termination. This macro has to make sure this\n// statement is compiled but not executed, to ensure that\n// EXPECT_DEATH_IF_SUPPORTED compiles with a certain\n// parameter if and only if EXPECT_DEATH compiles with it.\n// regex - A regex that a macro such as EXPECT_DEATH would use to test\n", "metadata": {"chunk_id": "doc_25_chunk_17", "original_index": 17, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_17"}, "type": "Document"} +{"page_content": "// the output of statement. 
This parameter has to be\n// compiled but not evaluated by this macro, to ensure that\n// this macro only accepts expressions that a macro such as\n// EXPECT_DEATH would accept.\n// terminator - Must be an empty statement for EXPECT_DEATH_IF_SUPPORTED\n// and a return statement for ASSERT_DEATH_IF_SUPPORTED.\n// This ensures that ASSERT_DEATH_IF_SUPPORTED will not\n// compile inside functions where ASSERT_DEATH doesn't\n// compile.\n//\n// The branch that has an always false condition is used to ensure that\n// statement and regex are compiled (and thus syntactically correct) but\n// never executed. The unreachable code macro protects the terminator\n// statement from generating an 'unreachable code' warning in case\n// statement unconditionally returns or throws. The Message constructor at\n", "metadata": {"chunk_id": "doc_25_chunk_18", "original_index": 18, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_18"}, "type": "Document"} +{"page_content": "// the end allows the syntax of streaming additional messages into the\n// macro, for compilational compatibility with EXPECT_DEATH/ASSERT_DEATH.\n# define GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, terminator) \\\n GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n if (::testing::internal::AlwaysTrue()) { \\\n GTEST_LOG_(WARNING) \\\n << \"Death tests are not supported on this platform.\\n\" \\\n << \"Statement '\" #statement \"' cannot be verified.\"; \\\n } else if (::testing::internal::AlwaysFalse()) { \\\n ::testing::internal::RE::PartialMatch(\".*\", (regex)); \\\n GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n terminator; \\\n } else \\\n ::testing::Message()\n\n", "metadata": {"chunk_id": "doc_25_chunk_19", "original_index": 19, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_19"}, "type": "Document"} +{"page_content": "// EXPECT_DEATH_IF_SUPPORTED(statement, regex) and\n// ASSERT_DEATH_IF_SUPPORTED(statement, regex) expand to real death tests if\n// death tests are supported; otherwise they just issue a warning. 
This is\n// useful when you are combining death test assertions with normal test\n// assertions in one test.\n#if GTEST_HAS_DEATH_TEST\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n#else\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, )\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, return)\n#endif\n\n} // namespace testing\n\n#endif // GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n", "metadata": {"chunk_id": "doc_25_chunk_20", "original_index": 20, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_20"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \n#include \n\nnamespace clickhouse {\n\nclass InputStream;\nclass OutputStream;\n\nclass WireFormat {\npublic:\n template \n static bool ReadFixed(InputStream& input, T* value);\n static bool ReadString(InputStream& input, std::string* value);\n static bool SkipString(InputStream& input);\n static bool ReadBytes(InputStream& input, void* buf, size_t len);\n static bool ReadUInt64(InputStream& input, uint64_t* value);\n static bool ReadVarint64(InputStream& output, uint64_t* value);\n\n template \n static void WriteFixed(OutputStream& output, const T& value);\n static void WriteBytes(OutputStream& output, const void* buf, size_t len);\n static void WriteString(OutputStream& output, std::string_view value);\n static void WriteUInt64(OutputStream& output, const uint64_t value);\n static void WriteVarint64(OutputStream& output, uint64_t value);\n\n", "metadata": {"chunk_id": "doc_26_chunk_0", "original_index": 0, "pid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36_0"}, "type": "Document"} +{"page_content": "private:\n static bool ReadAll(InputStream& input, void* buf, size_t len);\n static void WriteAll(OutputStream& output, const void* buf, size_t len);\n};\n\ntemplate \ninline bool WireFormat::ReadFixed(InputStream& input, T* value) {\n return ReadAll(input, value, sizeof(T));\n}\n\ninline bool WireFormat::ReadString(InputStream& input, std::string* value) {\n uint64_t len = 0;\n if (ReadVarint64(input, &len)) {\n if (len > 0x00FFFFFFULL) {\n return false;\n }\n value->resize((size_t)len);\n return ReadAll(input, value->data(), (size_t)len);\n }\n\n", "metadata": {"chunk_id": "doc_26_chunk_1", "original_index": 1, "pid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36_1"}, "type": "Document"} +{"page_content": " return false;\n}\n\ninline bool WireFormat::ReadBytes(InputStream& input, void* buf, size_t len) {\n return ReadAll(input, buf, len);\n}\n\ninline bool WireFormat::ReadUInt64(InputStream& input, uint64_t* value) {\n return ReadVarint64(input, value);\n}\n\ntemplate \ninline void WireFormat::WriteFixed(OutputStream& output, const T& value) {\n WriteAll(output, &value, sizeof(T));\n}\n\ninline void WireFormat::WriteBytes(OutputStream& output, const void* buf, size_t len) {\n WriteAll(output, buf, len);\n}\n\ninline void WireFormat::WriteString(OutputStream& output, std::string_view value) {\n WriteVarint64(output, value.size());\n WriteAll(output, value.data(), value.size());\n}\n\ninline void WireFormat::WriteUInt64(OutputStream& output, const uint64_t value) {\n WriteVarint64(output, value);\n}\n\n}\n", "metadata": {"chunk_id": "doc_26_chunk_2", "original_index": 2, "pid": 
"b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36_2"}, "type": "Document"} +{"page_content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n", "metadata": {"chunk_id": "doc_27_chunk_0", "original_index": 0, "pid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d_0"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"nullable.h\"\n\n#include \n#include \n#include \n#include \n\nnamespace clickhouse {\n\ntemplate \nclass ColumnLowCardinalityT;\n\nnamespace details {\n\n/** LowCardinalityHashKey used as key in unique items hashmap to abstract away key value\n * (type of which depends on dictionary column) and to reduce likelehood of collisions.\n *\n * In order to dramatically reduce collision rate, we use 2 different hashes from 2 different hash functions.\n * First hash is used in hashtable (to calculate item position).\n * Second one is used as part of key value and accessed via `operator==()` upon collision resolution/detection.\n */\nusing LowCardinalityHashKey = std::pair;\n\n", "metadata": {"chunk_id": "doc_28_chunk_0", "original_index": 0, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_0"}, "type": "Document"} +{"page_content": "struct LowCardinalityHashKeyHash {\n inline std::size_t operator()(const LowCardinalityHashKey &hash_key) const noexcept {\n return hash_key.first;\n }\n};\n\n}\n\n/*\n * LC column contains an \"invisible\" default item at the beginning of the collection. [default, ...]\n * If the nested type is Nullable, it contains a null-item at the beginning and a default item at the second position. [null, default, ...]\n * Null map is not serialized in LC columns. 
Instead, nulls are tracked by having an index of 0.\n * */\nclass ColumnLowCardinality : public Column {\npublic:\n using UniqueItems = std::unordered_map;\n\n", "metadata": {"chunk_id": "doc_28_chunk_1", "original_index": 1, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_1"}, "type": "Document"} +{"page_content": " template \n friend class ColumnLowCardinalityT;\n\nprivate:\n // IMPLEMENTATION NOTE: ColumnLowCardinalityT takes reference to underlying dictionary column object,\n // so make sure to NOT change address of the dictionary object (with reset(), swap()) or with anything else.\n ColumnRef dictionary_column_;\n ColumnRef index_column_;\n UniqueItems unique_items_map_;\n\npublic:\n ColumnLowCardinality(ColumnLowCardinality&& col) = default;\n // c-tor makes a deep copy of the dictionary_column.\n explicit ColumnLowCardinality(ColumnRef dictionary_column);\n explicit ColumnLowCardinality(std::shared_ptr dictionary_column);\n\n template \n explicit ColumnLowCardinality(std::shared_ptr> dictionary_column)\n : ColumnLowCardinality(dictionary_column->template As())\n {}\n\n", "metadata": {"chunk_id": "doc_28_chunk_2", "original_index": 2, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_2"}, "type": "Document"} +{"page_content": " ~ColumnLowCardinality();\n\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends another LowCardinality column to the end of this one, updating dictionary.\n void Append(ColumnRef /*column*/) override;\n\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data.\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n", "metadata": {"chunk_id": "doc_28_chunk_3", "original_index": 3, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_3"}, "type": "Document"} +{"page_content": " /// Makes slice of current column, with compacted dictionary\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n ItemView GetItem(size_t index) const override;\n\n size_t GetDictionarySize() const;\n TypeRef GetNestedType() const;\n\nprotected:\n std::uint64_t getDictionaryIndex(std::uint64_t item_index) const;\n void appendIndex(std::uint64_t item_index);\n void removeLastIndex();\n ColumnRef GetDictionary();\n\n void AppendUnsafe(const ItemView &);\n\nprivate:\n void Setup(ColumnRef dictionary_column);\n void AppendNullItem();\n void AppendDefaultItem();\n\npublic:\n static details::LowCardinalityHashKey computeHashKey(const ItemView &);\n};\n\n/** Type-aware wrapper that provides simple convenience interface for accessing/appending individual items.\n */\ntemplate \nclass ColumnLowCardinalityT : public ColumnLowCardinality {\n\n", "metadata": {"chunk_id": "doc_28_chunk_4", "original_index": 4, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_4"}, "type": "Document"} +{"page_content": " DictionaryColumnType& typed_dictionary_;\n const Type::Code type_;\n\npublic:\n using WrappedColumnType = DictionaryColumnType;\n // Type this column takes as argument of Append and returns 
with At() and operator[]\n using ValueType = typename DictionaryColumnType::ValueType;\n\n explicit ColumnLowCardinalityT(ColumnLowCardinality&& col)\n : ColumnLowCardinality(std::move(col))\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {\n }\n\n template \n explicit ColumnLowCardinalityT(Args &&... args)\n : ColumnLowCardinalityT(std::make_shared(std::forward(args)...))\n {}\n\n", "metadata": {"chunk_id": "doc_28_chunk_5", "original_index": 5, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_5"}, "type": "Document"} +{"page_content": " // Create LC column from existing T-column, making a deep copy of all contents.\n explicit ColumnLowCardinalityT(std::shared_ptr dictionary_col)\n : ColumnLowCardinality(dictionary_col)\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {}\n\n /// Extended interface to simplify reading/adding individual items.\n\n /// Returns element at given row number.\n inline ValueType At(size_t n) const {\n return typed_dictionary_.At(getDictionaryIndex(n));\n }\n\n /// Returns element at given row number.\n inline ValueType operator [] (size_t n) const {\n return typed_dictionary_[getDictionaryIndex(n)];\n }\n\n // so the non-virtual Append below doesn't shadow Append() from base class when compiled with older compilers.\n using ColumnLowCardinality::Append;\n\n", "metadata": {"chunk_id": "doc_28_chunk_6", "original_index": 6, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_6"}, "type": "Document"} +{"page_content": " inline void Append(const ValueType & value) {\n if constexpr (IsNullable) {\n if (value.has_value()) {\n AppendUnsafe(ItemView{type_, *value});\n } else {\n AppendUnsafe(ItemView{});\n }\n } else {\n AppendUnsafe(ItemView{type_, value});\n }\n }\n\n template \n inline void AppendMany(const T& container) {\n for (const auto & item : container) {\n Append(item);\n }\n }\n\n", "metadata": {"chunk_id": "doc_28_chunk_7", "original_index": 7, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_7"}, "type": "Document"} +{"page_content": " /** Create a ColumnLowCardinalityT from a ColumnLowCardinality, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnLowCardinality&& col) {\n return std::make_shared>(std::move(col));\n }\n\n", "metadata": {"chunk_id": "doc_28_chunk_8", "original_index": 8, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_8"}, "type": "Document"} +{"page_content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnLowCardinality::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnLowCardinality::CloneEmpty()); }\n\nprivate:\n\n template \n static auto GetTypeCode(T& column) {\n if constexpr (IsNullable) {\n return GetTypeCode(*column.Nested()->template AsStrict());\n } else {\n return 
column.Type()->GetCode();\n }\n }\n};\n\n}\n", "metadata": {"chunk_id": "doc_28_chunk_9", "original_index": 9, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_9"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate \nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template \n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\n", "metadata": {"chunk_id": "doc_29_chunk_0", "original_index": 0, "pid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_0"}, "type": "Document"} +{"page_content": "public:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n", "metadata": {"chunk_id": "doc_29_chunk_1", "original_index": 1, "pid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_1"}, "type": "Document"} +{"page_content": " /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "metadata": {"chunk_id": "doc_29_chunk_2", "original_index": 2, "pid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_2"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \n#include \n#include \n\nnamespace clickhouse {\n\ntemplate ()(std::declval())),\n typename Value = std::decay_t>\nclass ProjectedIterator {\npublic:\n using value_type = Value;\n using reference = Reference;\n using pointer = Reference;\n using difference_type = typename std::iterator_traits::difference_type;\n using iterator_category = typename std::iterator_traits::iterator_category;\n\n", "metadata": {"chunk_id": "doc_30_chunk_0", "original_index": 0, "pid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd_0"}, "type": "Document"} +{"page_content": " ProjectedIterator() = default;\n\n inline ProjectedIterator(Iterator const& iterator, UnaryFunction functor)\n : iterator_(iterator)\n , functor_(std::move(functor)) {\n }\n\n inline UnaryFunction functor() const { return functor; }\n\n inline Iterator const& base() const { return iterator_; }\n\n inline reference operator*() 
const { return functor_(iterator_); }\n\n inline ProjectedIterator& operator++() {\n ++iterator_;\n return *this;\n }\n\n inline ProjectedIterator& operator--() {\n --iterator_;\n return *this;\n }\n\n inline bool operator==(const ProjectedIterator& other) const {\n return this->iterator_ == other.iterator_;\n }\n\n inline bool operator!=(const ProjectedIterator& other) const {\n return !(*this == other);\n }\n\nprivate:\n Iterator iterator_;\n UnaryFunction functor_;\n};\n\n} // namespace clickhouse\n", "metadata": {"chunk_id": "doc_30_chunk_1", "original_index": 1, "pid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd_1"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n", "metadata": {"chunk_id": "doc_31_chunk_0", "original_index": 0, "pid": "4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8_0"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\n", "metadata": {"chunk_id": "doc_32_chunk_0", "original_index": 0, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_0"}, "type": "Document"} +{"page_content": "import com.fasterxml.jackson.annotation.JsonIgnoreProperties;\nimport com.google.cloud.tools.jib.api.LogEvent;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.http.Request;\nimport com.google.cloud.tools.jib.http.Response;\nimport com.google.cloud.tools.jib.json.JsonTemplate;\nimport com.google.cloud.tools.jib.json.JsonTemplateMapper;\nimport com.google.cloud.tools.jib.plugins.common.globalconfig.GlobalConfig;\nimport com.google.common.annotations.VisibleForTesting;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardCopyOption;\nimport java.time.Duration;\nimport java.time.Instant;\nimport java.time.format.DateTimeParseException;\nimport java.util.Optional;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Future;\nimport java.util.function.Consumer;\n\n", "metadata": {"chunk_id": "doc_32_chunk_1", "original_index": 1, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_1"}, "type": "Document"} +{"page_content": "/** Checks if Jib is up-to-date. */\npublic class UpdateChecker {\n\n private static final String LAST_UPDATE_CHECK_FILENAME = \"lastUpdateCheck\";\n\n /** JSON template for content downloaded during version check. */\n @JsonIgnoreProperties(ignoreUnknown = true)\n private static class VersionJsonTemplate implements JsonTemplate {\n private String latest = \"\";\n }\n\n", "metadata": {"chunk_id": "doc_32_chunk_2", "original_index": 2, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_2"}, "type": "Document"} +{"page_content": " /**\n * Begins checking for an update in a separate thread.\n *\n * @param executorService the {@link ExecutorService}\n * @param versionUrl the location to check for the latest version\n * @param toolName the tool name\n * @param toolVersion the tool version\n * @param log {@link Consumer} used to log messages\n * @return a new {@link UpdateChecker}\n */\n public static Future> checkForUpdate(\n ExecutorService executorService,\n String versionUrl,\n String toolName,\n String toolVersion,\n Consumer log) {\n return executorService.submit(\n () ->\n performUpdateCheck(\n GlobalConfig.getConfigDir(), toolVersion, versionUrl, toolName, log));\n }\n\n", "metadata": {"chunk_id": "doc_32_chunk_3", "original_index": 3, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_3"}, "type": "Document"} +{"page_content": " @VisibleForTesting\n static Optional performUpdateCheck(\n Path configDir,\n String currentVersion,\n String versionUrl,\n String toolName,\n Consumer log) {\n Path lastUpdateCheck = configDir.resolve(LAST_UPDATE_CHECK_FILENAME);\n\n try {\n // Check time of last update check\n if (Files.exists(lastUpdateCheck)) {\n try {\n String fileContents =\n new String(Files.readAllBytes(lastUpdateCheck), StandardCharsets.UTF_8);\n Instant modifiedTime = Instant.parse(fileContents);\n if (modifiedTime.plus(Duration.ofDays(1)).isAfter(Instant.now())) {\n return Optional.empty();\n }\n } catch 
(DateTimeParseException | IOException ex) {\n // If reading update time failed, file might be corrupt, so delete it\n log.accept(LogEvent.debug(\"Failed to read lastUpdateCheck; \" + ex.getMessage()));\n Files.delete(lastUpdateCheck);\n }\n }\n\n", "metadata": {"chunk_id": "doc_32_chunk_4", "original_index": 4, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_4"}, "type": "Document"} +{"page_content": " // Check for update\n FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n try {\n Response response =\n httpClient.get(\n new URL(versionUrl),\n Request.builder()\n .setHttpTimeout(3000)\n .setUserAgent(\"jib \" + currentVersion + \" \" + toolName)\n .build());\n VersionJsonTemplate version =\n JsonTemplateMapper.readJson(response.getBody(), VersionJsonTemplate.class);\n\n", "metadata": {"chunk_id": "doc_32_chunk_5", "original_index": 5, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_5"}, "type": "Document"} +{"page_content": " Path lastUpdateCheckTemp =\n Files.createTempFile(configDir, LAST_UPDATE_CHECK_FILENAME, null);\n lastUpdateCheckTemp.toFile().deleteOnExit();\n Files.write(lastUpdateCheckTemp, Instant.now().toString().getBytes(StandardCharsets.UTF_8));\n Files.move(lastUpdateCheckTemp, lastUpdateCheck, StandardCopyOption.REPLACE_EXISTING);\n\n if (currentVersion.equals(version.latest)) {\n return Optional.empty();\n }\n return Optional.of(version.latest);\n } finally {\n httpClient.shutDown();\n }\n\n } catch (IOException ex) {\n log.accept(LogEvent.debug(\"Update check failed; \" + ex.getMessage()));\n }\n\n return Optional.empty();\n }\n\n", "metadata": {"chunk_id": "doc_32_chunk_6", "original_index": 6, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_6"}, "type": "Document"} +{"page_content": " /**\n * Returns the latest Jib version available if the check succeeded and the current version is\n * outdated, or returns {@code Optional.empty()} if the check was interrupted or did not determine\n * that a later version was available.\n *\n * @param updateMessageFuture the {@link Future} returned by {@link UpdateChecker#checkForUpdate}\n * @return the latest version, if found, else {@code Optional.empty()}.\n */\n public static Optional finishUpdateCheck(Future> updateMessageFuture) {\n if (updateMessageFuture.isDone()) {\n try {\n return updateMessageFuture.get();\n } catch (InterruptedException | ExecutionException ex) {\n // No need to restore the interrupted status. The intention here is to silently consume any\n // kind of error\n }\n }\n updateMessageFuture.cancel(true);\n return Optional.empty();\n }\n\n private UpdateChecker() {}\n}\n", "metadata": {"chunk_id": "doc_32_chunk_7", "original_index": 7, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_7"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "metadata": {"chunk_id": "doc_33_chunk_0", "original_index": 0, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_0"}, "type": "Document"} +{"page_content": "package com.google.cloud.tools.jib.plugins.common;\n\nimport static com.google.common.truth.Truth.assertThat;\nimport static com.google.common.truth.Truth8.assertThat;\nimport static org.junit.Assert.assertThrows;\nimport static org.mockito.ArgumentMatchers.anyString;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\n", "metadata": {"chunk_id": "doc_33_chunk_1", "original_index": 1, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_1"}, "type": "Document"} +{"page_content": "import com.google.cloud.tools.jib.api.Credential;\nimport com.google.cloud.tools.jib.api.CredentialRetriever;\nimport com.google.cloud.tools.jib.frontend.CredentialRetrieverFactory;\nimport com.google.common.collect.ImmutableMap;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n/** Tests for {@link DefaultCredentialRetrievers}. */\n@RunWith(MockitoJUnitRunner.class)\npublic class DefaultCredentialRetrieversTest {\n\n @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n", "metadata": {"chunk_id": "doc_33_chunk_2", "original_index": 2, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_2"}, "type": "Document"} +{"page_content": " @Mock private CredentialRetrieverFactory mockCredentialRetrieverFactory;\n @Mock private CredentialRetriever mockDockerCredentialHelperCredentialRetriever;\n @Mock private CredentialRetriever mockKnownCredentialRetriever;\n @Mock private CredentialRetriever mockInferredCredentialRetriever;\n @Mock private CredentialRetriever mockWellKnownCredentialHelpersCredentialRetriever;\n @Mock private CredentialRetriever mockXdgPrimaryCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvLegacyDockerConfigCredentialRetriever;\n", "metadata": {"chunk_id": "doc_33_chunk_3", "original_index": 3, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_3"}, "type": "Document"} +{"page_content": " @Mock private CredentialRetriever mockSystemHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeLegacyDockerConfigCredentialRetriever;\n @Mock private 
CredentialRetriever mockApplicationDefaultCredentialRetriever;\n\n", "metadata": {"chunk_id": "doc_33_chunk_4", "original_index": 4, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_4"}, "type": "Document"}
+{"page_content": " private Properties properties;\n private Map<String, String> environment;\n\n private final Credential knownCredential = Credential.from(\"username\", \"password\");\n private final Credential inferredCredential = Credential.from(\"username2\", \"password2\");\n\n @Before\n public void setUp() {\n properties = new Properties();\n properties.setProperty(\"os.name\", \"unknown\");\n properties.setProperty(\"user.home\", Paths.get(\"/system/home\").toString());\n environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/docker_config\").toString(),\n \"XDG_RUNTIME_DIR\",\n Paths.get(\"/run/user/1000\").toString(),\n \"XDG_CONFIG_HOME\",\n Paths.get(\"/env/home/.config\").toString());\n\n", "metadata": {"chunk_id": "doc_33_chunk_5", "original_index": 5, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_5"}, "type": "Document"}
+{"page_content": " when(mockCredentialRetrieverFactory.dockerCredentialHelper(anyString()))\n .thenReturn(mockDockerCredentialHelperCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(knownCredential, \"credentialSource\"))\n .thenReturn(mockKnownCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(inferredCredential, \"inferredCredentialSource\"))\n .thenReturn(mockInferredCredentialRetriever);\n when(mockCredentialRetrieverFactory.wellKnownCredentialHelpers())\n .thenReturn(mockWellKnownCredentialHelpersCredentialRetriever);\n\n", "metadata": {"chunk_id": "doc_33_chunk_6", "original_index": 6, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_6"}, "type": "Document"}
+{"page_content": " when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/run/user/1000/containers/auth.json\")))\n .thenReturn(mockXdgPrimaryCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.config/containers/auth.json\")))\n .thenReturn(mockEnvHomeXdgCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.config/containers/auth.json\")))\n .thenReturn(mockSystemHomeXdgCredentialRetriever);\n\n", "metadata": {"chunk_id": "doc_33_chunk_7", "original_index": 7, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_7"}, "type": "Document"}
+{"page_content": " when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/config.json\")))\n .thenReturn(mockDockerConfigEnvDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/.dockerconfigjson\")))\n .thenReturn(mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(Paths.get(\"/docker_config/.dockercfg\")))\n .thenReturn(mockDockerConfigEnvLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/system/home/.docker/config.json\")))\n .thenReturn(mockSystemHomeDockerConfigCredentialRetriever);\n", "metadata": {"chunk_id": "doc_33_chunk_8", "original_index": 8, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_8"}, "type": "Document"}
+{"page_content": " when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.docker/.dockerconfigjson\")))\n 
.thenReturn(mockSystemHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/system/home/.docker/.dockercfg\")))\n .thenReturn(mockSystemHomeLegacyDockerConfigCredentialRetriever);\n", "metadata": {"chunk_id": "doc_33_chunk_9", "original_index": 9, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_9"}, "type": "Document"}
+{"page_content": " when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/env/home/.docker/config.json\")))\n .thenReturn(mockEnvHomeDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockEnvHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/env/home/.docker/.dockercfg\")))\n .thenReturn(mockEnvHomeLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.googleApplicationDefaultCredentials())\n .thenReturn(mockApplicationDefaultCredentialRetriever);\n }\n\n", "metadata": {"chunk_id": "doc_33_chunk_10", "original_index": 10, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_10"}, "type": "Document"}
+{"page_content": " @Test\n public void testAsList() throws FileNotFoundException {\n List<CredentialRetriever> retriever =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retriever)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n", "metadata": {"chunk_id": "doc_33_chunk_11", "original_index": 11, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_11"}, "type": "Document"}
+{"page_content": " mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "metadata": {"chunk_id": "doc_33_chunk_12", "original_index": 12, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_12"}, "type": "Document"}
+{"page_content": " @Test\n public void testAsList_all() throws FileNotFoundException {\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setKnownCredential(knownCredential, \"credentialSource\")\n .setInferredCredential(inferredCredential, \"inferredCredentialSource\")\n .setCredentialHelper(\"credentialHelperSuffix\")\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockKnownCredentialRetriever,\n mockDockerCredentialHelperCredentialRetriever,\n mockInferredCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n", "metadata": {"chunk_id": "doc_33_chunk_13", "original_index": 13, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_13"}, "type": "Document"}
+{"page_content": " mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n", "metadata": {"chunk_id": "doc_33_chunk_14", "original_index": 14, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_14"}, "type": "Document"} +{"page_content": " verify(mockCredentialRetrieverFactory).known(knownCredential, \"credentialSource\");\n verify(mockCredentialRetrieverFactory).known(inferredCredential, \"inferredCredentialSource\");\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(\"docker-credential-credentialHelperSuffix\");\n }\n\n", "metadata": {"chunk_id": "doc_33_chunk_15", "original_index": 15, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_15"}, "type": "Document"} +{"page_content": " @Test\n public void testAsList_credentialHelperPath() throws IOException {\n Path fakeCredentialHelperPath = temporaryFolder.newFile(\"fake-credHelper\").toPath();\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(fakeCredentialHelperPath.toString());\n\n", "metadata": {"chunk_id": "doc_33_chunk_16", "original_index": 16, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_16"}, "type": "Document"} +{"page_content": " List retrievers = credentialRetrievers.asList();\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n", "metadata": {"chunk_id": "doc_33_chunk_17", "original_index": 17, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_17"}, "type": "Document"} +{"page_content": " mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(fakeCredentialHelperPath.toString());\n\n", "metadata": {"chunk_id": "doc_33_chunk_18", "original_index": 18, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_18"}, "type": "Document"} +{"page_content": " Files.delete(fakeCredentialHelperPath);\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex)\n .hasMessageThat()\n .isEqualTo(\"Specified credential helper was not found: \" + fakeCredentialHelperPath);\n }\n\n @Test\n public void testDockerConfigRetrievers_undefinedHome() throws FileNotFoundException {\n List retrievers =\n new DefaultCredentialRetrievers(\n mockCredentialRetrieverFactory, new 
Properties(), new HashMap<>())\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "metadata": {"chunk_id": "doc_33_chunk_19", "original_index": 19, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_19"}, "type": "Document"}
+{"page_content": " @Test\n public void testDockerConfigRetrievers_noDuplicateRetrievers() throws FileNotFoundException {\n properties.setProperty(\"user.home\", Paths.get(\"/env/home\").toString());\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n", "metadata": {"chunk_id": "doc_33_chunk_20", "original_index": 20, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_20"}, "type": "Document"}
+{"page_content": " mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n", "metadata": {"chunk_id": "doc_33_chunk_21", "original_index": 21, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_21"}, "type": "Document"}
+{"page_content": " environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/env/home/.docker\").toString());\n retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockEnvHomeXdgCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "metadata": {"chunk_id": "doc_33_chunk_22", "original_index": 22, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_22"}, "type": "Document"}
+{"page_content": " @Test\n public void testCredentialHelper_cmdExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.cmd\").toPath();\n Path pathWithoutCmd = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutCmd.getParent().resolve(\"foo.cmd\"));\n\n", "metadata": {"chunk_id": "doc_33_chunk_23", "original_index": 23, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_23"}, "type": "Document"}
+{"page_content": " DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n 
.setCredentialHelper(pathWithoutCmd.toString())\n .asList();\n\n", "metadata": {"chunk_id": "doc_33_chunk_24", "original_index": 24, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_24"}, "type": "Document"}
+{"page_content": " assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "metadata": {"chunk_id": "doc_33_chunk_25", "original_index": 25, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_25"}, "type": "Document"}
+{"page_content": " @Test\n public void testCredentialHelper_exeExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.exe\").toPath();\n Path pathWithoutExe = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutExe.getParent().resolve(\"foo.exe\"));\n\n", "metadata": {"chunk_id": "doc_33_chunk_26", "original_index": 26, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_26"}, "type": "Document"}
+{"page_content": " DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString())\n .asList();\n\n", "metadata": {"chunk_id": "doc_33_chunk_27", "original_index": 27, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_27"}, "type": "Document"}
+{"page_content": " assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n}\n", "metadata": {"chunk_id": "doc_33_chunk_28", "original_index": 28, "pid": 
"f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_28"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2019 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "metadata": {"chunk_id": "doc_34_chunk_0", "original_index": 0, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_0"}, "type": "Document"} +{"page_content": "package com.google.cloud.tools.jib.api;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.api.buildplan.AbsoluteUnixPath;\nimport com.google.cloud.tools.jib.api.buildplan.FileEntriesLayer;\nimport com.google.cloud.tools.jib.api.buildplan.FilePermissions;\nimport com.google.common.collect.ArrayListMultimap;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.Multimap;\nimport com.google.common.io.CharStreams;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\n", "metadata": {"chunk_id": "doc_34_chunk_1", "original_index": 1, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_1"}, "type": "Document"} +{"page_content": "import java.nio.file.Path;\nimport java.time.Instant;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport java.util.function.BiConsumer;\nimport java.util.zip.GZIPInputStream;\nimport org.apache.commons.compress.archivers.tar.TarArchiveEntry;\nimport org.apache.commons.compress.archivers.tar.TarArchiveInputStream;\nimport org.junit.BeforeClass;\nimport org.junit.ClassRule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\n", "metadata": {"chunk_id": "doc_34_chunk_2", "original_index": 2, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_2"}, "type": "Document"} +{"page_content": "/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n @ClassRule public static final TemporaryFolder imageLocation = new TemporaryFolder();\n\n private static File imageTar;\n\n @BeforeClass\n public static void createImage()\n throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n IOException, RegistryException, ExecutionException {\n\n Path root = imageLocation.getRoot().toPath();\n Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n Path fileB = Files.createFile(root.resolve(\"fileB.txt\"));\n Path fileC = Files.createFile(root.resolve(\"fileC.txt\"));\n Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n Path subsubdir = Files.createDirectory(subdir.resolve(\"subdir\"));\n Files.createFile(subdir.resolve(\"fileD.txt\"));\n Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n", "metadata": {"chunk_id": "doc_34_chunk_3", 
"original_index": 3, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_3"}, "type": "Document"} +{"page_content": " imageTar = new File(imageLocation.getRoot(), \"image.tar\");\n Containerizer containerizer =\n Containerizer.to(TarImage.at(imageTar.toPath()).named(\"jib-core/reproducible\"));\n\n Jib.fromScratch()\n .setEntrypoint(\"echo\", \"Hello World\")\n .addLayer(ImmutableList.of(fileA), AbsoluteUnixPath.get(\"/app\"))\n // layer with out-of-order files\n .addLayer(ImmutableList.of(fileC, fileB), \"/app\")\n .addFileEntriesLayer(\n FileEntriesLayer.builder()\n .addEntryRecursive(subdir, AbsoluteUnixPath.get(\"/app\"))\n .build())\n .containerize(containerizer);\n }\n\n", "metadata": {"chunk_id": "doc_34_chunk_4", "original_index": 4, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_4"}, "type": "Document"} +{"page_content": " @Test\n public void testTarballStructure() throws IOException {\n // known content should produce known results\n List actual = new ArrayList<>();\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n actual.add(imageEntry.getName());\n }\n }\n\n assertThat(actual)\n .containsExactly(\n \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n \"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n \"config.json\",\n \"manifest.json\")\n .inOrder();\n }\n\n", "metadata": {"chunk_id": "doc_34_chunk_5", "original_index": 5, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_5"}, "type": "Document"} +{"page_content": " @Test\n public void testManifest() throws IOException {\n String expectedManifest =\n \"[{\\\"Config\\\":\\\"config.json\\\",\\\"RepoTags\\\":[\\\"jib-core/reproducible:latest\\\"],\"\n + \"\\\"Layers\\\":[\\\"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\\\",\\\"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\\\",\\\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\\\"]}]\";\n String generatedManifest = extractFromTarFileAsString(imageTar, \"manifest.json\");\n assertThat(generatedManifest).isEqualTo(expectedManifest);\n }\n\n", "metadata": {"chunk_id": "doc_34_chunk_6", "original_index": 6, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_6"}, "type": "Document"} +{"page_content": " @Test\n public void testConfiguration() throws IOException {\n String expectedConfig =\n \"{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\"\n + \"\\\"config\\\":{\\\"Env\\\":[],\\\"Entrypoint\\\":[\\\"echo\\\",\\\"Hello World\\\"],\\\"ExposedPorts\\\":{},\\\"Labels\\\":{},\\\"Volumes\\\":{}},\"\n + \"\\\"history\\\":[{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"}],\"\n", "metadata": {"chunk_id": "doc_34_chunk_7", "original_index": 7, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_7"}, "type": 
"Document"} +{"page_content": " + \"\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:18e4f44e6d1835bd968339b166057bd17ab7d4cbb56dc7262a5cafea7cf8d405\\\",\\\"sha256:13369c34f073f2b9c1fa6431e23d925f1a8eac65b1726c8cc8fcc2596c69b414\\\",\\\"sha256:4f92c507112d7880ca0f504ef8272b7fdee107263270125036a260a741565923\\\"]}}\";\n String generatedConfig = extractFromTarFileAsString(imageTar, \"config.json\");\n assertThat(generatedConfig).isEqualTo(expectedConfig);\n }\n\n @Test\n public void testImageLayout() throws IOException {\n Set paths = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n paths.add(layerEntry.getName());\n }\n });\n assertThat(paths)\n .containsExactly(\n \"app/fileA.txt\",\n \"app/fileB.txt\",\n \"app/fileC.txt\",\n \"app/fileD.txt\",\n \"app/subdir/fileE.txt\");\n }\n\n", "metadata": {"chunk_id": "doc_34_chunk_8", "original_index": 8, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_8"}, "type": "Document"} +{"page_content": " @Test\n public void testAllFileAndDirectories() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) ->\n assertThat(layerEntry.isFile() || layerEntry.isDirectory()).isTrue());\n }\n\n @Test\n public void testTimestampsEpochPlus1s() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) -> {\n Instant modificationTime = layerEntry.getLastModifiedDate().toInstant();\n assertThat(modificationTime).isEqualTo(Instant.ofEpochSecond(1));\n });\n }\n\n", "metadata": {"chunk_id": "doc_34_chunk_9", "original_index": 9, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_9"}, "type": "Document"} +{"page_content": " @Test\n public void testPermissions() throws IOException {\n assertThat(FilePermissions.DEFAULT_FILE_PERMISSIONS.getPermissionBits()).isEqualTo(0644);\n assertThat(FilePermissions.DEFAULT_FOLDER_PERMISSIONS.getPermissionBits()).isEqualTo(0755);\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0644);\n } else if (layerEntry.isDirectory()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0755);\n }\n });\n }\n\n", "metadata": {"chunk_id": "doc_34_chunk_10", "original_index": 10, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_10"}, "type": "Document"} +{"page_content": " @Test\n public void testNoImplicitParentDirectories() throws IOException {\n Set directories = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n String entryPath = layerEntry.getName();\n if (layerEntry.isDirectory()) {\n assertThat(entryPath.endsWith(\"/\")).isTrue();\n entryPath = entryPath.substring(0, entryPath.length() - 1);\n }\n\n int lastSlashPosition = entryPath.lastIndexOf('/');\n String parent = entryPath.substring(0, Math.max(0, lastSlashPosition));\n if (!parent.isEmpty()) {\n assertThat(directories.contains(parent)).isTrue();\n }\n if (layerEntry.isDirectory()) {\n directories.add(entryPath);\n }\n });\n }\n\n", "metadata": {"chunk_id": "doc_34_chunk_11", "original_index": 11, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_11"}, "type": "Document"} +{"page_content": " @Test\n public void testFileOrdering() throws IOException {\n Multimap layerPaths = ArrayListMultimap.create();\n layerEntriesDo((layerName, layerEntry) -> layerPaths.put(layerName, layerEntry.getName()));\n for (Collection paths : layerPaths.asMap().values()) {\n List sorted = new ArrayList<>(paths);\n // 
ReproducibleLayerBuilder sorts by TarArchiveEntry::getName()\n Collections.sort(sorted);\n assertThat(paths).containsExactlyElementsIn(sorted).inOrder();\n }\n }\n\n private void layerEntriesDo(BiConsumer<String, TarArchiveEntry> layerConsumer)\n throws IOException {\n\n", "metadata": {"chunk_id": "doc_34_chunk_12", "original_index": 12, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_12"}, "type": "Document"}
+{"page_content": " try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n String imageEntryName = imageEntry.getName();\n // assume all .tar.gz files are layers\n if (imageEntry.isFile() && imageEntryName.endsWith(\".tar.gz\")) {\n @SuppressWarnings(\"resource\") // must not close sub-streams\n TarArchiveInputStream layer = new TarArchiveInputStream(new GZIPInputStream(input));\n TarArchiveEntry layerEntry;\n while ((layerEntry = layer.getNextTarEntry()) != null) {\n layerConsumer.accept(imageEntryName, layerEntry);\n }\n }\n }\n }\n }\n\n", "metadata": {"chunk_id": "doc_34_chunk_13", "original_index": 13, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_13"}, "type": "Document"}
+{"page_content": " private static String extractFromTarFileAsString(File tarFile, String filename)\n throws IOException {\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(tarFile.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n if (filename.equals(imageEntry.getName())) {\n return CharStreams.toString(new InputStreamReader(input, StandardCharsets.UTF_8));\n }\n }\n }\n throw new AssertionError(\"file not found: \" + filename);\n }\n}\n", "metadata": {"chunk_id": "doc_34_chunk_14", "original_index": 14, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_14"}, "type": "Document"}
+{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "metadata": {"chunk_id": "doc_35_chunk_0", "original_index": 0, "pid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0_0"}, "type": "Document"}
+{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "metadata": {"chunk_id": "doc_36_chunk_0", "original_index": 0, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_0"}, "type": "Document"}
+{"page_content": "package com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. */\npublic class MavenSettingsServerCredentialsTest {\n\n", "metadata": {"chunk_id": "doc_36_chunk_1", "original_index": 1, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_1"}, "type": "Document"}
+{"page_content": " private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n", "metadata": {"chunk_id": "doc_36_chunk_2", "original_index": 2, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_2"}, "type": "Document"}
+{"page_content": " @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n", "metadata": {"chunk_id": "doc_36_chunk_3", "original_index": 3, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_3"}, "type": "Document"}
+{"page_content": " @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional<AuthProperty> auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional<AuthProperty> auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n", "metadata": {"chunk_id": "doc_36_chunk_4", "original_index": 4, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_4"}, "type": "Document"}
+{"page_content": " @Test\n public void 
testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional<AuthProperty> auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional<AuthProperty> auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n", "metadata": {"chunk_id": "doc_36_chunk_5", "original_index": 5, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_5"}, "type": "Document"}
+{"page_content": " @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional<AuthProperty> auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n", "metadata": {"chunk_id": "doc_36_chunk_6", "original_index": 6, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_6"}, "type": "Document"}
+{"page_content": " @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. Should work as well.\n Optional<AuthProperty> auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n", "metadata": {"chunk_id": "doc_36_chunk_7", "original_index": 7, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_7"}, "type": "Document"}
+{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.registry;\n\n", "metadata": {"chunk_id": "doc_37_chunk_0", "original_index": 0, "pid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_0"}, "type": "Document"} +{"page_content": "import com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.blob.Blob;\nimport com.google.cloud.tools.jib.event.EventHandlers;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.image.json.V22ManifestTemplate;\nimport com.google.common.io.ByteStreams;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.concurrent.atomic.LongAdder;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Test;\n\n", "metadata": {"chunk_id": "doc_37_chunk_1", "original_index": 1, "pid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_1"}, "type": "Document"} +{"page_content": "/** Integration tests for {@link BlobPuller}. */\npublic class BlobPullerIntegrationTest {\n\n private final FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n\n @Test\n public void testPull() throws IOException, RegistryException {\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n V22ManifestTemplate manifestTemplate =\n registryClient\n .pullManifest(\n ManifestPullerIntegrationTest.KNOWN_MANIFEST_V22_SHA, V22ManifestTemplate.class)\n .getManifest();\n\n DescriptorDigest realDigest = manifestTemplate.getLayers().get(0).getDigest();\n\n", "metadata": {"chunk_id": "doc_37_chunk_2", "original_index": 2, "pid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_2"}, "type": "Document"} +{"page_content": " // Pulls a layer BLOB of the distroless/base image.\n LongAdder totalByteCount = new LongAdder();\n LongAdder expectedSize = new LongAdder();\n Blob pulledBlob =\n registryClient.pullBlob(\n realDigest,\n size -> {\n Assert.assertEquals(0, expectedSize.sum());\n expectedSize.add(size);\n },\n totalByteCount::add);\n Assert.assertEquals(realDigest, pulledBlob.writeTo(ByteStreams.nullOutputStream()).getDigest());\n Assert.assertTrue(expectedSize.sum() > 0);\n Assert.assertEquals(expectedSize.sum(), totalByteCount.sum());\n }\n\n", "metadata": {"chunk_id": "doc_37_chunk_3", "original_index": 3, "pid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_3"}, "type": "Document"} +{"page_content": " @Test\n public void testPull_unknownBlob() throws IOException, DigestException {\n DescriptorDigest nonexistentDigest =\n DescriptorDigest.fromHash(\n \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\");\n\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n\n try {\n registryClient\n .pullBlob(nonexistentDigest, ignored -> {}, ignored -> {})\n .writeTo(ByteStreams.nullOutputStream());\n Assert.fail(\"Trying to pull nonexistent blob should have errored\");\n\n } catch (IOException ex) {\n if (!(ex.getCause() instanceof RegistryErrorException)) {\n throw ex;\n }\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"pull BLOB for gcr.io/distroless/base with digest \" + 
nonexistentDigest));\n }\n }\n}\n", "metadata": {"chunk_id": "doc_37_chunk_4", "original_index": 4, "pid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_4"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\n", "metadata": {"chunk_id": "doc_38_chunk_0", "original_index": 0, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_0"}, "type": "Document"} +{"page_content": "import com.google.api.client.http.HttpResponseException;\nimport com.google.api.client.http.HttpStatusCodes;\nimport com.google.cloud.tools.jib.api.CacheDirectoryCreationException;\nimport com.google.cloud.tools.jib.api.Containerizer;\nimport com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.ImageReference;\nimport com.google.cloud.tools.jib.api.InsecureRegistryException;\nimport com.google.cloud.tools.jib.api.JibContainer;\nimport com.google.cloud.tools.jib.api.JibContainerBuilder;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.api.RegistryUnauthorizedException;\nimport com.google.cloud.tools.jib.registry.RegistryCredentialsNotSentException;\nimport com.google.common.collect.ImmutableSet;\nimport java.io.IOException;\nimport java.net.UnknownHostException;\n", "metadata": {"chunk_id": "doc_38_chunk_1", "original_index": 1, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_1"}, "type": "Document"} +{"page_content": "import java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport org.apache.http.conn.HttpHostConnectException;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.Mockito;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n", "metadata": {"chunk_id": "doc_38_chunk_2", "original_index": 2, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_2"}, "type": "Document"} +{"page_content": "/** Tests for {@link JibBuildRunner}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class JibBuildRunnerTest {\n\n private static final HelpfulSuggestions TEST_HELPFUL_SUGGESTIONS =\n new HelpfulSuggestions(\n \"messagePrefix\", \"clearCacheCommand\", \"toConfig\", \"toFlag\", \"buildFile\");\n\n @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Mock private JibContainerBuilder mockJibContainerBuilder;\n @Mock private JibContainer mockJibContainer;\n @Mock private Containerizer mockContainerizer;\n @Mock private RegistryUnauthorizedException mockRegistryUnauthorizedException;\n @Mock private RegistryCredentialsNotSentException mockRegistryCredentialsNotSentException;\n @Mock private HttpResponseException mockHttpResponseException;\n\n", "metadata": {"chunk_id": "doc_38_chunk_3", "original_index": 3, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_3"}, "type": "Document"} +{"page_content": " private JibBuildRunner testJibBuildRunner;\n\n @Before\n public void setUpMocks() {\n testJibBuildRunner =\n new JibBuildRunner(\n mockJibContainerBuilder,\n mockContainerizer,\n ignored -> {},\n TEST_HELPFUL_SUGGESTIONS,\n \"ignored\",\n \"ignored\");\n }\n\n @Test\n public void testBuildImage_pass()\n throws BuildStepsExecutionException, IOException, CacheDirectoryCreationException {\n JibContainer buildResult = testJibBuildRunner.runBuild();\n Assert.assertNull(buildResult);\n }\n\n", "metadata": {"chunk_id": "doc_38_chunk_4", "original_index": 4, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_4"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_httpHostConnectException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n HttpHostConnectException mockHttpHostConnectException =\n Mockito.mock(HttpHostConnectException.class);\n Mockito.doThrow(mockHttpHostConnectException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forHttpHostConnect(), ex.getMessage());\n }\n }\n\n", "metadata": {"chunk_id": "doc_38_chunk_5", "original_index": 5, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_5"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_unknownHostException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n UnknownHostException mockUnknownHostException = Mockito.mock(UnknownHostException.class);\n Mockito.doThrow(mockUnknownHostException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forUnknownHost(), ex.getMessage());\n }\n }\n\n", "metadata": {"chunk_id": "doc_38_chunk_6", "original_index": 6, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_6"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_insecureRegistryException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n InsecureRegistryException mockInsecureRegistryException =\n Mockito.mock(InsecureRegistryException.class);\n Mockito.doThrow(mockInsecureRegistryException)\n .when(mockJibContainerBuilder)\n 
.containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forInsecureRegistry(), ex.getMessage());\n }\n }\n\n", "metadata": {"chunk_id": "doc_38_chunk_7", "original_index": 7, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_7"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_registryUnauthorizedException_statusCodeForbidden()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode())\n .thenReturn(HttpStatusCodes.STATUS_CODE_FORBIDDEN);\n\n", "metadata": {"chunk_id": "doc_38_chunk_8", "original_index": 8, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_8"}, "type": "Document"} +{"page_content": " Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forHttpStatusCodeForbidden(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n", "metadata": {"chunk_id": "doc_38_chunk_9", "original_index": 9, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_9"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_registryUnauthorizedException_noCredentials()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode()).thenReturn(-1); // Unknown\n\n Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forNoCredentialsDefined(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n", "metadata": {"chunk_id": "doc_38_chunk_10", "original_index": 10, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_10"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_registryCredentialsNotSentException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.doThrow(mockRegistryCredentialsNotSentException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forCredentialsNotSent(), ex.getMessage());\n }\n }\n\n", "metadata": {"chunk_id": "doc_38_chunk_11", "original_index": 11, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_11"}, "type": "Document"} +{"page_content": " @Test\n public void 
testBuildImage_other()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.doThrow(new RegistryException(\"messagePrefix\"))\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.none(), ex.getMessage());\n }\n }\n\n", "metadata": {"chunk_id": "doc_38_chunk_12", "original_index": 12, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_12"}, "type": "Document"}
+{"page_content": " @Test\n public void testBuildImage_writesImageJson() throws Exception {\n final ImageReference targetImageReference = ImageReference.parse(\"gcr.io/distroless/java:11\");\n final String imageId =\n \"sha256:61bb3ec31a47cb730eb58a38bbfa813761a51dca69d10e39c24c3d00a7b2c7a9\";\n final String digest = \"sha256:3f1be7e19129edb202c071a659a4db35280ab2bb1a16f223bfd5d1948657b6fc\";\n final Set<String> tags = ImmutableSet.of(\"latest\", \"0.1.41-69d10e-20200116T101403\");\n\n final Path outputPath = temporaryFolder.newFile(\"jib-image.json\").toPath();\n\n", "metadata": {"chunk_id": "doc_38_chunk_13", "original_index": 13, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_13"}, "type": "Document"}
+{"page_content": " Mockito.when(mockJibContainer.getTargetImage()).thenReturn(targetImageReference);\n Mockito.when(mockJibContainer.getImageId()).thenReturn(DescriptorDigest.fromDigest(imageId));\n Mockito.when(mockJibContainer.getDigest()).thenReturn(DescriptorDigest.fromDigest(digest));\n Mockito.when(mockJibContainer.getTags()).thenReturn(tags);\n Mockito.when(mockJibContainerBuilder.containerize(mockContainerizer))\n .thenReturn(mockJibContainer);\n Mockito.when(mockJibContainer.isImagePushed()).thenReturn(true);\n testJibBuildRunner.writeImageJson(outputPath).runBuild();\n\n", "metadata": {"chunk_id": "doc_38_chunk_14", "original_index": 14, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_14"}, "type": "Document"}
+{"page_content": " final String outputJson = new String(Files.readAllBytes(outputPath), StandardCharsets.UTF_8);\n final ImageMetadataOutput metadataOutput = ImageMetadataOutput.fromJson(outputJson);\n Assert.assertEquals(targetImageReference.toString(), metadataOutput.getImage());\n Assert.assertEquals(imageId, metadataOutput.getImageId());\n Assert.assertEquals(digest, metadataOutput.getImageDigest());\n Assert.assertEquals(tags, ImmutableSet.copyOf(metadataOutput.getTags()));\n Assert.assertTrue(metadataOutput.isImagePushed());\n }\n}\n", "metadata": {"chunk_id": "doc_38_chunk_15", "original_index": 15, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_15"}, "type": "Document"}
+{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "metadata": {"chunk_id": "doc_39_chunk_0", "original_index": 0, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_0"}, "type": "Document"} +{"page_content": "package com.google.cloud.tools.jib.maven;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.Command;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.Arrays;\nimport org.apache.maven.it.VerificationException;\nimport org.apache.maven.it.Verifier;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Assume;\nimport org.junit.ClassRule;\nimport org.junit.Test;\n\n/** Integration tests for {@link BuildDockerMojo}. */\npublic class BuildDockerMojoIntegrationTest {\n\n @ClassRule public static final TestProject simpleTestProject = new TestProject(\"simple\");\n\n @ClassRule public static final TestProject emptyTestProject = new TestProject(\"empty\");\n\n @ClassRule\n public static final TestProject defaultTargetTestProject = new TestProject(\"default-target\");\n\n", "metadata": {"chunk_id": "doc_39_chunk_1", "original_index": 1, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_1"}, "type": "Document"} +{"page_content": " private static void buildToDockerDaemon(TestProject project, String imageReference, String pomXml)\n throws VerificationException, DigestException, IOException {\n Verifier verifier = new Verifier(project.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", imageReference);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=\" + pomXml);\n verifier.executeGoal(\"package\");\n\n", "metadata": {"chunk_id": "doc_39_chunk_2", "original_index": 2, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_2"}, "type": "Document"} +{"page_content": " verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyErrorFreeLog();\n\n BuildImageMojoIntegrationTest.readDigestFile(\n project.getProjectRoot().resolve(\"target/jib-image.digest\"));\n }\n\n /**\n * Builds and runs jib:buildDocker on a project at {@code projectRoot} pushing to {@code\n * imageReference}.\n */\n private static String buildToDockerDaemonAndRun(TestProject project, String imageReference)\n throws VerificationException, IOException, InterruptedException, DigestException {\n buildToDockerDaemon(project, imageReference, \"pom.xml\");\n\n", "metadata": {"chunk_id": "doc_39_chunk_3", "original_index": 3, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_3"}, "type": "Document"} +{"page_content": " String dockerInspectVolumes =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Volumes}}'\", imageReference).run();\n String dockerInspectExposedPorts =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.ExposedPorts}}'\", imageReference)\n .run();\n String dockerInspectLabels =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Labels}}'\", imageReference).run();\n String history = new Command(\"docker\", \"history\", imageReference).run();\n\n MatcherAssert.assertThat(\n dockerInspectVolumes, CoreMatchers.containsString(\"\\\"/var/log\\\":{},\\\"/var/log2\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectExposedPorts,\n CoreMatchers.containsString(\n 
\"\\\"1000/tcp\\\":{},\\\"2000/udp\\\":{},\\\"2001/udp\\\":{},\\\"2002/udp\\\":{},\\\"2003/udp\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectLabels,\n CoreMatchers.containsString(\"\\\"key1\\\":\\\"value1\\\",\\\"key2\\\":\\\"value2\\\"\"));\n\n", "metadata": {"chunk_id": "doc_39_chunk_4", "original_index": 4, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_4"}, "type": "Document"} +{"page_content": " return new Command(\"docker\", \"run\", \"--rm\", imageReference).run();\n }\n\n @Test\n public void testExecute_simple()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n Assert.assertEquals(\n \"Hello, world. An argument.\\n1970-01-01T00:00:01Z\\nrw-r--r--\\nrw-r--r--\\nfoo\\ncat\\n\"\n + \"1970-01-01T00:00:01Z\\n1970-01-01T00:00:01Z\\n\",\n buildToDockerDaemonAndRun(simpleTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n", "metadata": {"chunk_id": "doc_39_chunk_5", "original_index": 5, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_5"}, "type": "Document"} +{"page_content": " @Test\n public void testExecute_simple_extraDirectoriesFiltering()\n throws DigestException, IOException, InterruptedException, VerificationException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-extra-dirs-filtering.xml\");\n String output =\n new Command(\"docker\", \"run\", \"--rm\", \"--entrypoint=ls\", targetImage, \"-1R\", \"/extras\")\n .run();\n\n // /extras/cat.txt\n // /extras/foo\n // /extras/sub/\n // /extras/sub/a.json\n assertThat(output).isEqualTo(\"/extras:\\ncat.txt\\nfoo\\nsub\\n\\n/extras/sub:\\na.json\\n\");\n }\n\n @Test\n public void testExecute_dockerClient()\n throws VerificationException, IOException, InterruptedException {\n Assume.assumeFalse(System.getProperty(\"os.name\").startsWith(\"Windows\"));\n new Command(\n \"chmod\", \"+x\", simpleTestProject.getProjectRoot().resolve(\"mock-docker.sh\").toString())\n .run();\n\n", "metadata": {"chunk_id": "doc_39_chunk_6", "original_index": 6, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_6"}, "type": "Document"} +{"page_content": " String targetImage = \"simpleimage:maven\" + System.nanoTime();\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-dockerclient.xml\");\n verifier.addCliOption(\"--debug\");\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyTextInLog(\"Docker load called. 
value1 value2\");\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_empty()\n throws InterruptedException, IOException, VerificationException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n\n", "metadata": {"chunk_id": "doc_39_chunk_7", "original_index": 7, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_7"}, "type": "Document"} +{"page_content": " Assert.assertEquals(\"\", buildToDockerDaemonAndRun(emptyTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_defaultTarget()\n throws VerificationException, IOException, InterruptedException, DigestException {\n Assert.assertEquals(\n \"Hello, world. An argument.\\n\",\n buildToDockerDaemonAndRun(\n defaultTargetTestProject, \"default-target-name:default-target-version\"));\n }\n\n", "metadata": {"chunk_id": "doc_39_chunk_8", "original_index": 8, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_8"}, "type": "Document"} +{"page_content": " @Test\n public void testExecute_jibSkip() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibSkip(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_jibContainerizeSkips() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibContainerizeSkips(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_userNumeric()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom.xml\");\n Assert.assertEquals(\n \"12345:54321\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n", "metadata": {"chunk_id": "doc_39_chunk_9", "original_index": 9, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_9"}, "type": "Document"} +{"page_content": " @Test\n public void testExecute_userNames()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"brokenuserimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom-broken-user.xml\");\n Assert.assertEquals(\n \"myuser:mygroup\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_noToImageAndInvalidProjectName()\n throws DigestException, VerificationException, IOException, InterruptedException {\n buildToDockerDaemon(simpleTestProject, \"image reference ignored\", \"pom-no-to-image.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", \"my-artifact-id:1\").run());\n }\n\n", "metadata": {"chunk_id": "doc_39_chunk_10", "original_index": 10, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_10"}, "type": "Document"} +{"page_content": " @Test\n public void testExecute_jarContainerization()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"jarcontainerizationimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-jar-containerization.xml\");\n Assert.assertEquals(\n \"Hello, world. 
\\nImplementation-Title: hello-world\\nImplementation-Version: 1\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testExecute_jarContainerizationOnMissingJar() throws IOException {\n try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"_TARGET_IMAGE\", \"erroronmissingjar\");\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-jar-containerization.xml\");\n verifier.executeGoals(Arrays.asList(\"clean\", \"jib:dockerBuild\"));\n Assert.fail();\n\n", "metadata": {"chunk_id": "doc_39_chunk_11", "original_index": 11, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_11"}, "type": "Document"} +{"page_content": " } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"Obtaining project build output files failed; make sure you have packaged your \"\n + \"project before trying to build the image. (Did you accidentally run \\\"mvn \"\n + \"clean jib:build\\\" instead of \\\"mvn clean package jib:build\\\"?)\"));\n }\n }\n\n @Test\n public void testExecute_jibRequireVersion_ok() throws VerificationException, IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n", "metadata": {"chunk_id": "doc_39_chunk_12", "original_index": 12, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_12"}, "type": "Document"} +{"page_content": " Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n // this plugin should match 1.0\n verifier.setSystemProperty(\"jib.requiredVersion\", \"1.0\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_jibRequireVersion_fail() throws IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n", "metadata": {"chunk_id": "doc_39_chunk_13", "original_index": 13, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_13"}, "type": "Document"} +{"page_content": " try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.requiredVersion\", \"[,1.0]\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n Assert.fail();\n } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(), CoreMatchers.containsString(\"but is required to be [,1.0]\"));\n }\n }\n\n", "metadata": {"chunk_id": "doc_39_chunk_14", "original_index": 14, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_14"}, "type": "Document"} +{"page_content": " @Test\n public void testCredHelperConfigurationSimple()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-1.xml\");\n Assert.assertEquals(\n \"Hello, world. 
\\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testCredHelperConfigurationComplex()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-2.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n}\n", "metadata": {"chunk_id": "doc_39_chunk_15", "original_index": 15, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_15"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2017 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "metadata": {"chunk_id": "doc_40_chunk_0", "original_index": 0, "pid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09_0"}, "type": "Document"} +{"page_content": "package com.google.cloud.tools.jib.api;\n\nimport java.text.MessageFormat;\n\n/** Thrown because registry authentication failed. */\npublic class RegistryAuthenticationFailedException extends RegistryException {\n\n private static final String REASON = \"Failed to authenticate with registry {0}/{1} because: {2}\";\n private final String serverUrl;\n private final String imageName;\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param cause the underlying cause that triggered this exception\n */\n public RegistryAuthenticationFailedException(\n String serverUrl, String imageName, Throwable cause) {\n super(MessageFormat.format(REASON, serverUrl, imageName, cause.getMessage()), cause);\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n", "metadata": {"chunk_id": "doc_40_chunk_1", "original_index": 1, "pid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09_1"}, "type": "Document"} +{"page_content": " /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param reason the underlying reason that triggered this exception\n */\n public RegistryAuthenticationFailedException(String serverUrl, String imageName, String reason) {\n super(MessageFormat.format(REASON, serverUrl, imageName, reason));\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n /**\n * The server being authenticated.\n *\n * @return the server being authenticated\n */\n public String getServerUrl() {\n return serverUrl;\n }\n\n /**\n * The image being authenticated.\n *\n * @return the image being authenticated\n */\n public String getImageName() {\n return imageName;\n }\n}\n", "metadata": {"chunk_id": "doc_40_chunk_2", "original_index": 2, "pid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09_2"}, "type": 
"Document"} +{"page_content": "package com.password4j;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\npublic class PepperGeneratorTest\n{\n\n @Test\n public void testSaltLength()\n {\n // GIVEN\n int length = 23;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n\n @Test\n public void testSaltNoLength()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.generate();\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(24, pepper.length());\n }\n\n", "metadata": {"chunk_id": "doc_41_chunk_0", "original_index": 0, "pid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b_0"}, "type": "Document"} +{"page_content": " @Test(expected = BadParametersException.class)\n public void testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n", "metadata": {"chunk_id": "doc_41_chunk_1", "original_index": 1, "pid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b_1"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\n", "metadata": {"chunk_id": "doc_42_chunk_0", "original_index": 0, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_0"}, "type": "Document"} +{"page_content": "package com.password4j;\n\n/**\n * Class in the hierarchy to avoid code duplication.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic abstract class AbstractHashingFunction implements HashingFunction\n{\n\n /**\n * Compares two {@link CharSequence}s as byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first CharSequence\n * @param b the second CharSequence\n * @return true if both {@link CharSequence}s are the same, false if not\n */\n protected static boolean slowEquals(CharSequence a, CharSequence b)\n {\n return slowEquals(Utils.fromCharSequenceToBytes(a), Utils.fromCharSequenceToBytes(b));\n }\n\n", "metadata": {"chunk_id": "doc_42_chunk_1", "original_index": 1, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_1"}, "type": "Document"} +{"page_content": " /**\n * Compares two byte arrays in length-constant time. 
This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first byte array\n * @param b the second byte array\n * @return true if both byte arrays are the same, false if not\n */\n protected static boolean slowEquals(byte[] a, byte[] b)\n {\n int diff = a.length ^ b.length;\n for (int i = 0; i < a.length && i < b.length; i++)\n {\n diff |= a[i] ^ b[i];\n }\n return diff == 0;\n }\n\n", "metadata": {"chunk_id": "doc_42_chunk_2", "original_index": 2, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_2"}, "type": "Document"} +{"page_content": " @Override\n public Hash hash(CharSequence plainTextPassword, String salt, CharSequence pepper)\n {\n CharSequence peppered = Utils.append(pepper, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n @Override\n public Hash hash(byte[] plainTextPassword, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n byte[] peppered = Utils.append(pepperAsBytes, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n", "metadata": {"chunk_id": "doc_42_chunk_3", "original_index": 3, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_3"}, "type": "Document"} +{"page_content": " /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 0.1.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n", "metadata": {"chunk_id": "doc_42_chunk_4", "original_index": 4, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_4"}, "type": "Document"} +{"page_content": " /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password as bytes array\n * @param hashed the hash as bytes array\n * @param salt the salt as bytes array used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n", "metadata": {"chunk_id": "doc_42_chunk_5", "original_index": 5, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_5"}, "type": "Document"} +{"page_content": " /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.5.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt, CharSequence pepper)\n {\n return check(Utils.append(pepper, plainTextPassword), hashed, salt);\n }\n\n", "metadata": {"chunk_id": "doc_42_chunk_6", "original_index": 6, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_6"}, "type": "Document"} +{"page_content": " /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n return check(Utils.append(pepperAsBytes, plainTextPassword), hashed, salt);\n }\n\n}\n", "metadata": {"chunk_id": "doc_42_chunk_7", "original_index": 7, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_7"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2023 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\nimport com.password4j.types.Argon2;\nimport com.password4j.types.Bcrypt;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\n\npublic class BalloonHashingFunctionTest\n{\n\n", "metadata": {"chunk_id": "doc_43_chunk_0", "original_index": 0, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_0"}, "type": "Document"} +{"page_content": " private static final Object[][] TEST_VECTORS = new Object[][]{\n // Single thread\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 0, 3, \"716043dff777b44aa7b88dcbab12c078abecfac9d289c5b5195967aa63440dfb\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 0, 3, 
\"5f02f8206f9cd212485c6bdf85527b698956701ad0852106f94b94ee94577378\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 0, 3, \"20aa99d7fe3f4df4bd98c655c5480ec98b143107a331fd491deda885c4d6a6cc\"},\n new Object[]{\"\\0\", \"\\0\", \"SHA-256\", 3, 3, 0, 3, \"4fc7e302ffa29ae0eac31166cee7a552d1d71135f4e0da66486fb68a749b73a4\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 0, 3, \"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n", "metadata": {"chunk_id": "doc_43_chunk_1", "original_index": 1, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_1"}, "type": "Document"} +{"page_content": " // Multiple threads\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 4, 3, \"1832bd8e5cbeba1cb174a13838095e7e66508e9bf04c40178990adbc8ba9eb6f\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 2, 3, \"f8767fe04059cef67b4427cda99bf8bcdd983959dbd399a5e63ea04523716c23\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 3, 3, \"bcad257eff3d1090b50276514857e60db5d0ec484129013ef3c88f7d36e438d6\"},\n", "metadata": {"chunk_id": "doc_43_chunk_2", "original_index": 2, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_2"}, "type": "Document"} +{"page_content": " new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 1, 3, \"498344ee9d31baf82cc93ebb3874fe0b76e164302c1cefa1b63a90a69afb9b4d\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 4, 3, \"8a665611e40710ba1fd78c181549c750f17c12e423c11930ce997f04c7153e0c\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 1, 3, \"d9e33c683451b21fb3720afbd78bf12518c1d4401fa39f054b052a145c968bb1\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 16, 3, \"a67b383bb88a282aef595d98697f90820adf64582a4b3627c76b7da3d8bae915\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 1, 3, \"97a11df9382a788c781929831d409d3599e0b67ab452ef834718114efdcd1c6d\"},\n\n", "metadata": {"chunk_id": "doc_43_chunk_3", "original_index": 3, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_3"}, "type": "Document"} +{"page_content": " };\n\n\n @Test\n public void test()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = new BalloonHashingFunction((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n }\n\n }\n\n @Test\n public void testInstance()\n {\n\n", "metadata": {"chunk_id": "doc_43_chunk_4", "original_index": 4, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_4"}, "type": "Document"} +{"page_content": " BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = BalloonHashingFunction.getInstance((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash(((String) testVector[0]).getBytes(), ((String) testVector[1]).getBytes()).getResult());\n\n", "metadata": {"chunk_id": "doc_43_chunk_5", "original_index": 
5, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_5"}, "type": "Document"} +{"page_content": " Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n Assert.assertTrue(balloonHashingFunction.check(((String) testVector[0]).getBytes(), ((String) testVector[7]).getBytes(), ((String) testVector[1]).getBytes()));\n }\n\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String m = \"SHA-256\";\n int i = 2;\n int p = 3;\n int l = 4;\n int v = 5;\n BalloonHashingFunction balloonHashingFunction = BalloonHashingFunction.getInstance(m, i, p, l, v);\n\n", "metadata": {"chunk_id": "doc_43_chunk_6", "original_index": 6, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_6"}, "type": "Document"} +{"page_content": " // THEN\n boolean eqNull = balloonHashingFunction.equals(null);\n boolean eqClass = balloonHashingFunction.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = balloonHashingFunction.equals(BalloonHashingFunction.getInstance(m, i, p, l, v));\n boolean sameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v));\n String toString = balloonHashingFunction.toString();\n int hashCode = balloonHashingFunction.hashCode();\n boolean notSameInst1 = balloonHashingFunction.equals(new BalloonHashingFunction(\"SHA-512\", i, p, l, v));\n", "metadata": {"chunk_id": "doc_43_chunk_7", "original_index": 7, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_7"}, "type": "Document"} +{"page_content": " boolean notSameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i+1, p, l, v));\n boolean notSameInst3 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p+1, l, v));\n boolean notSameInst4 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l+1, v));\n boolean notSameInst6 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v+1));\n\n", "metadata": {"chunk_id": "doc_43_chunk_8", "original_index": 8, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_8"}, "type": "Document"} +{"page_content": " // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new BalloonHashingFunction(m, i+1, p, l, v).toString());\n Assert.assertNotEquals(hashCode, new BalloonHashingFunction(m, i, p, l, v+1).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n Assert.assertFalse(notSameInst3);\n Assert.assertFalse(notSameInst4);\n Assert.assertFalse(notSameInst6);\n }\n\n}\n", "metadata": {"chunk_id": "doc_43_chunk_9", "original_index": 9, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_9"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport 
java.util.Arrays;\n\n\nclass Blake2b\n{\n private static final long[] IV = { 0x6a09e667f3bcc908L, 0xbb67ae8584caa73bL, 0x3c6ef372fe94f82bL, 0xa54ff53a5f1d36f1L,\n 0x510e527fade682d1L, 0x9b05688c2b3e6c1fL, 0x1f83d9abfb41bd6bL, 0x5be0cd19137e2179L };\n\n", "metadata": {"chunk_id": "doc_44_chunk_0", "original_index": 0, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_0"}, "type": "Document"} +{"page_content": " private static final byte[][] SIGMA = { { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 }, { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },\n { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 }, { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },\n { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 }, { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },\n { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 }, { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },\n { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 } };\n\n", "metadata": {"chunk_id": "doc_44_chunk_1", "original_index": 1, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_1"}, "type": "Document"} +{"page_content": " private static final int ROUNDS = 12;\n\n private static final int BLOCK_LENGTH_BYTES = 128;\n\n private final int digestLength;\n\n private final int keyLength;\n\n private final byte[] buffer;\n\n private final long[] internalState = new long[16];\n\n private int bufferPos = 0;\n\n private long[] chainValue = null;\n\n private long t0 = 0L;\n\n private long t1 = 0L;\n\n private long f0 = 0L;\n\n", "metadata": {"chunk_id": "doc_44_chunk_2", "original_index": 2, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_2"}, "type": "Document"} +{"page_content": " /**\n * Basic sized constructor - size in bytes.\n *\n * @param digestSize size of the digest in bytes\n */\n Blake2b(int digestSize)\n {\n if (digestSize < 1 || digestSize > 64)\n {\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n }\n\n buffer = new byte[BLOCK_LENGTH_BYTES];\n keyLength = 0;\n this.digestLength = digestSize;\n init();\n }\n\n", "metadata": {"chunk_id": "doc_44_chunk_3", "original_index": 3, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_3"}, "type": "Document"} +{"page_content": " // initialize chainValue\n private void init()\n {\n chainValue = new long[8];\n chainValue[0] = IV[0] ^ (digestLength | ((long) keyLength << 8) | 0x1010000);\n chainValue[1] = IV[1];\n chainValue[2] = IV[2];\n chainValue[3] = IV[3];\n chainValue[4] = IV[4];\n chainValue[5] = IV[5];\n chainValue[6] = IV[6];\n chainValue[7] = IV[7];\n }\n\n", "metadata": {"chunk_id": "doc_44_chunk_4", "original_index": 4, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_4"}, "type": "Document"} +{"page_content": " private void initializeInternalState()\n {\n System.arraycopy(chainValue, 0, internalState, 0, chainValue.length);\n System.arraycopy(IV, 0, internalState, chainValue.length, 4);\n internalState[12] = t0 ^ IV[4];\n internalState[13] = t1 ^ IV[5];\n internalState[14] = f0 ^ IV[6];\n internalState[15] = IV[7];// ^ f1 with f1 = 0\n }\n\n", "metadata": {"chunk_id": "doc_44_chunk_5", "original_index": 5, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_5"}, "type": 
"Document"} +{"page_content": " void update(byte[] message)\n {\n if (message == null)\n {\n return;\n }\n update(message, 0, message.length);\n }\n\n /**\n * update the message digest with a block of bytes.\n *\n * @param message the byte array containing the data.\n * @param offset the offset into the byte array where the data starts.\n * @param len the length of the data.\n */\n void update(byte[] message, int offset, int len)\n {\n int remainingLength = 0;\n\n", "metadata": {"chunk_id": "doc_44_chunk_6", "original_index": 6, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_6"}, "type": "Document"} +{"page_content": " if (bufferPos != 0)\n {\n remainingLength = BLOCK_LENGTH_BYTES - bufferPos;\n if (remainingLength < len)\n {\n System.arraycopy(message, offset, buffer, bufferPos, remainingLength);\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n bufferPos = 0;\n Arrays.fill(buffer, (byte) 0);// clear buffer\n }\n else\n {\n System.arraycopy(message, offset, buffer, bufferPos, len);\n bufferPos += len;\n return;\n }\n }\n\n", "metadata": {"chunk_id": "doc_44_chunk_7", "original_index": 7, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_7"}, "type": "Document"} +{"page_content": " int messagePos;\n int blockWiseLastPos = offset + len - BLOCK_LENGTH_BYTES;\n for (messagePos = offset + remainingLength; messagePos < blockWiseLastPos; messagePos += BLOCK_LENGTH_BYTES)\n {\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(message, messagePos);\n }\n\n", "metadata": {"chunk_id": "doc_44_chunk_8", "original_index": 8, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_8"}, "type": "Document"} +{"page_content": " // fill the buffer with left bytes, this might be a full block\n System.arraycopy(message, messagePos, buffer, 0, offset + len - messagePos);\n bufferPos += offset + len - messagePos;\n }\n\n /**\n * close the digest, producing the final digest value. 
The doFinal\n * call leaves the digest reset.\n * Key, salt and personal string remain.\n *\n * @param out the array the digest is to be copied into.\n * @param outOffset the offset into the out array the digest is to start at.\n */\n void doFinal(byte[] out, int outOffset)\n {\n\n", "metadata": {"chunk_id": "doc_44_chunk_9", "original_index": 9, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_9"}, "type": "Document"} +{"page_content": " f0 = 0xFFFFFFFFFFFFFFFFL;\n t0 += bufferPos;\n if (bufferPos > 0 && t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n Arrays.fill(buffer, (byte) 0);// Holds eventually the key if input is null\n Arrays.fill(internalState, 0L);\n\n for (int i = 0; i < chainValue.length && (i * 8 < digestLength); i++)\n {\n byte[] bytes = Utils.longToLittleEndian(chainValue[i]);\n\n", "metadata": {"chunk_id": "doc_44_chunk_10", "original_index": 10, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_10"}, "type": "Document"} +{"page_content": " if (i * 8 < digestLength - 8)\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, 8);\n }\n else\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, digestLength - (i * 8));\n }\n }\n\n Arrays.fill(chainValue, 0L);\n\n reset();\n }\n\n", "metadata": {"chunk_id": "doc_44_chunk_11", "original_index": 11, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_11"}, "type": "Document"} +{"page_content": " /**\n * Reset the digest back to it's initial state.\n * The key, the salt and the personal string will\n * remain for further computations.\n */\n void reset()\n {\n bufferPos = 0;\n f0 = 0L;\n t0 = 0L;\n t1 = 0L;\n chainValue = null;\n Arrays.fill(buffer, (byte) 0);\n init();\n }\n\n", "metadata": {"chunk_id": "doc_44_chunk_12", "original_index": 12, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_12"}, "type": "Document"} +{"page_content": " private void compress(byte[] message, int messagePos)\n {\n\n initializeInternalState();\n\n long[] m = new long[16];\n for (int j = 0; j < 16; j++)\n {\n m[j] = Utils.littleEndianToLong(message, messagePos + j * 8);\n }\n\n for (int round = 0; round < ROUNDS; round++)\n {\n\n", "metadata": {"chunk_id": "doc_44_chunk_13", "original_index": 13, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_13"}, "type": "Document"} +{"page_content": " // G apply to columns of internalState:m[blake2b_sigma[round][2 *\n // blockPos]] /+1\n functionG(m[SIGMA[round][0]], m[SIGMA[round][1]], 0, 4, 8, 12);\n functionG(m[SIGMA[round][2]], m[SIGMA[round][3]], 1, 5, 9, 13);\n functionG(m[SIGMA[round][4]], m[SIGMA[round][5]], 2, 6, 10, 14);\n functionG(m[SIGMA[round][6]], m[SIGMA[round][7]], 3, 7, 11, 15);\n // G apply to diagonals of internalState:\n functionG(m[SIGMA[round][8]], m[SIGMA[round][9]], 0, 5, 10, 15);\n functionG(m[SIGMA[round][10]], m[SIGMA[round][11]], 1, 6, 11, 12);\n functionG(m[SIGMA[round][12]], m[SIGMA[round][13]], 2, 7, 8, 13);\n functionG(m[SIGMA[round][14]], m[SIGMA[round][15]], 3, 4, 9, 14);\n }\n\n", "metadata": {"chunk_id": "doc_44_chunk_14", "original_index": 14, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_14"}, "type": "Document"} +{"page_content": " // update chain values:\n for (int offset = 0; offset < chainValue.length; offset++)\n {\n chainValue[offset] = chainValue[offset] ^ internalState[offset] ^ internalState[offset + 8];\n }\n }\n\n private void functionG(long m1, long m2, int posA, int posB, int posC, int posD)\n {\n\n", "metadata": 
{"chunk_id": "doc_44_chunk_15", "original_index": 15, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_15"}, "type": "Document"} +{"page_content": " internalState[posA] = internalState[posA] + internalState[posB] + m1;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 32);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 24); // replaces 25 of BLAKE\n internalState[posA] = internalState[posA] + internalState[posB] + m2;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 16);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 63); // replaces 11 of BLAKE\n }\n}\n", "metadata": {"chunk_id": "doc_44_chunk_16", "original_index": 16, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_16"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "metadata": {"chunk_id": "doc_45_chunk_0", "original_index": 0, "pid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_0"}, "type": "Document"} +{"page_content": "/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "metadata": {"chunk_id": "doc_45_chunk_1", "original_index": 1, "pid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_1"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\nimport java.util.Objects;\n\n", "metadata": {"chunk_id": "doc_46_chunk_0", "original_index": 0, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_0"}, "type": "Document"} +{"page_content": "\n/**\n * This class contains all the information computed after\n * calculating a cryptographic hash.\n *
<p>
\n * The same {@link HashingFunction} used to generate the hash\n * is used to verify the plain password; in addition cryptographic\n * seasoning such as salt and pepper are stored in this object.\n *
<p>
\n * A hash is the product of a one-way function that maps data of arbitrary size to\n * fixed-size values; it is called hashing function (HF).\n * This class represent hashes generated by cryptographic hash function (CHF),\n * where each function has the following properties:\n * <ul>\n *     <li>it is deterministic, meaning that the same message always results in the same hash</li>\n *     <li>it is quick to compute the hash value for any given message</li>\n *     <li>it is infeasible to generate a message that yields a given hash value</li>\n *     <li>it is infeasible to find two different messages with the same hash value</li>\n", "metadata": {"chunk_id": "doc_46_chunk_1", "original_index": 1, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_1"}, "type": "Document"}
+{"page_content": " *     <li>a small change to a message should change the hash value so extensively that the new hash value\n * appears uncorrelated with the old hash value</li>\n * </ul>\n * <p>
\n * A salt is a unique, randomly generated string that is added to each password as part of the hashing process.\n * As the salt is unique for every user, an attacker has to crack hashes one at a time using the respective salt,\n * rather than being able to calculate a hash once and compare it against every stored hash.\n *
<p>
\n", "metadata": {"chunk_id": "doc_46_chunk_2", "original_index": 2, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_2"}, "type": "Document"} +{"page_content": " * A pepper can be used in additional to salting to provide an additional layer of protection.\n * It is similar to a salt, but has two key differences:\n *

    \n *
  • The pepper is shared between all stored passwords, rather than being unique like a salt.
  • \n *
  • The pepper is not stored in the database, unlike the salts.
  • \n *
\n *\n * @author David Bertoldi\n * @see
OWASP Password Storage Cheat Sheet\n * @see Key derivation function\n * @see Cryptographic hash function\n * @since 0.1.0\n */\npublic class Hash\n{\n\n", "metadata": {"chunk_id": "doc_46_chunk_3", "original_index": 3, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_3"}, "type": "Document"} +{"page_content": " /**\n * Represents the full output of a cryptographic hashing function.\n * Depending on the implementation of the CHF, it may contain\n * the salt and the configuration of the CHF itself.\n */\n private byte[] result;\n\n /**\n * Represents the computed output of a cryptographic hashing function.\n * It never contains salt and other configurations.\n */\n private byte[] bytes;\n\n /**\n * Represents the salt: random data that is used as an additional input\n * to a cryptographic hashing function.\n */\n private byte[] salt;\n\n", "metadata": {"chunk_id": "doc_46_chunk_4", "original_index": 4, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_4"}, "type": "Document"} +{"page_content": " /**\n * Represents the pepper: a secret added to the input password\n * prior to being hashed with a cryptographic hash function\n */\n private CharSequence pepper;\n\n /**\n * Represents the hashing function used to generate this object.\n *\n * @see HashingFunction for more details\n */\n private HashingFunction hashingFunction;\n\n /**\n * It is meant to not be used if not internally.\n * The other constructor must be used instead.\n *\n * @see Hash#Hash(HashingFunction, String, byte[], String)\n * @since 0.1.0\n */\n @SuppressWarnings(\"unused\")\n private Hash()\n {\n //\n }\n\n", "metadata": {"chunk_id": "doc_46_chunk_5", "original_index": 5, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_5"}, "type": "Document"} +{"page_content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n", "metadata": {"chunk_id": "doc_46_chunk_6", "original_index": 6, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_6"}, "type": "Document"} +{"page_content": " * @deprecated As of 1.8.1 because of the salt conversion from {@link String} to byte[].\n * {@link Hash#Hash(HashingFunction, String, byte[], byte[])} should be used instead.\n */\n @Deprecated\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, String salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, Utils.fromCharSequenceToBytes(salt));\n }\n\n\n\n", "metadata": {"chunk_id": "doc_46_chunk_7", "original_index": 7, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_7"}, "type": "Document"} +{"page_content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n */\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, byte[] salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, salt);\n }\n\n", "metadata": {"chunk_id": "doc_46_chunk_8", "original_index": 8, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_8"}, "type": "Document"} +{"page_content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash as bytes array.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n", "metadata": {"chunk_id": "doc_46_chunk_9", "original_index": 9, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_9"}, "type": "Document"} +{"page_content": " * @param salt the salt used for the computation as bytes array.\n * @since 1.7.0\n */\n public Hash(HashingFunction hashingFunction, byte[] result, byte[] bytes, byte[] salt)\n {\n this.hashingFunction = hashingFunction;\n this.salt = salt;\n this.result = result;\n this.bytes = bytes;\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public String getResult()\n {\n return Utils.fromBytesToString(result);\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public byte[] getResultAsBytes()\n {\n return result;\n }\n\n /**\n * Retrieves the hash as byte array and without the parameters\n * encoded in the final hash.\n *\n * @return the hash.\n * @since 1.5.1\n */\n public byte[] getBytes()\n {\n return bytes;\n }\n\n", "metadata": {"chunk_id": "doc_46_chunk_10", "original_index": 10, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_10"}, "type": "Document"} +{"page_content": " /**\n * Retrieves the {@link HashingFunction} used\n * to hash the password.\n *\n * @return the CHF\n * @since 0.4.0\n */\n public HashingFunction getHashingFunction()\n {\n return hashingFunction;\n }\n\n /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as {@link String}.\n * @since 0.1.0\n */\n public String getSalt()\n {\n return Utils.fromBytesToString(salt);\n }\n\n", "metadata": {"chunk_id": "doc_46_chunk_11", "original_index": 11, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_11"}, "type": "Document"} +{"page_content": " /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as bytes array.\n * @since 1.7.0\n */\n public byte[] getSaltBytes()\n {\n return salt;\n }\n\n /**\n * Retrieves the pepper used with the password in the hashing function.\n *\n * @return the pepper.\n * @since 0.1.0\n */\n public CharSequence getPepper()\n {\n return pepper;\n }\n\n /**\n * Stores the pepper used together with the password in the hashing function.\n *
<p>
\n * This methods should be used just after the creation of this object.\n *\n * @param pepper the pepper used.\n * @since 0.1.0\n */\n void setPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n }\n\n", "metadata": {"chunk_id": "doc_46_chunk_12", "original_index": 12, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_12"}, "type": "Document"} +{"page_content": " /**\n * Produces a human-readable description of the {@link Hash}.\n *\n * @return a readable version of this object\n * @since 0.1.0\n */\n @Override\n public String toString()\n {\n StringBuilder sb = new StringBuilder();\n if (this.hashingFunction != null)\n {\n sb.append(hashingFunction.getClass().getSimpleName());\n }\n sb.append(\"[salt=\").append(getSalt()).append(\", pepper=\").append(getPepper()).append(\", hash=\").append(getResult())\n .append(\"]\");\n return sb.toString();\n }\n\n", "metadata": {"chunk_id": "doc_46_chunk_13", "original_index": 13, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_13"}, "type": "Document"} +{"page_content": " /**\n * Two {@link Hash}es are considered equals if they contain\n * the same hash, salt, pepper and they are generated with\n * the same {@link HashingFunction}\n *\n * @param obj the object to compare\n * @return true if equals\n * @since 0.1.0\n */\n @Override\n public boolean equals(Object obj)\n {\n if (obj == null || !this.getClass().equals(obj.getClass()))\n {\n return false;\n }\n\n", "metadata": {"chunk_id": "doc_46_chunk_14", "original_index": 14, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_14"}, "type": "Document"} +{"page_content": " Hash otherHash = (Hash) obj;\n return hasSameValues(otherHash);\n }\n\n private boolean hasSameValues(Hash otherHash)\n {\n return Arrays.equals(this.result, otherHash.result) //\n && Arrays.equals(this.bytes, otherHash.bytes) //\n && Arrays.equals(this.salt, otherHash.salt) //\n && areEquals(this.pepper, otherHash.pepper) //\n && this.hashingFunction.equals(otherHash.hashingFunction);\n }\n\n private static boolean areEquals(CharSequence cs1, CharSequence cs2)\n {\n if (cs1 == cs2)\n {\n return true;\n }\n else if (cs1 != null && cs2 != null)\n {\n return cs1.equals(cs2);\n }\n return false;\n }\n\n @Override\n public int hashCode()\n {\n return Objects.hash(Arrays.hashCode(result), Arrays.hashCode(salt), pepper, hashingFunction);\n }\n}\n", "metadata": {"chunk_id": "doc_46_chunk_15", "original_index": 15, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_15"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "metadata": {"chunk_id": "doc_47_chunk_0", "original_index": 0, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_0"}, "type": "Document"} +{"page_content": "/**\n * Builder class that helps to create a chain of parameters to be used\n * in the hashing 
process.\n *\n * @author David Bertoldi\n * @since 1.0.0\n */\npublic class HashBuilder\n{\n private byte[] plainTextPassword;\n\n protected byte[] salt;\n\n protected CharSequence pepper;\n\n @SuppressWarnings(\"unused\")\n private HashBuilder()\n {\n //\n }\n\n /**\n * @param plainTextPassword the plain text password\n * @since 1.0.0\n */\n protected HashBuilder(CharSequence plainTextPassword)\n {\n this.plainTextPassword = Utils.fromCharSequenceToBytes(plainTextPassword);\n }\n\n /**\n * @param plainTextPasswordAsBytes the plain text password as bytes array\n * @since 1.7.0\n */\n protected HashBuilder(byte[] plainTextPasswordAsBytes)\n {\n this.plainTextPassword = plainTextPasswordAsBytes;\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_1", "original_index": 1, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_1"}, "type": "Document"} +{"page_content": " /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param salt cryptographic salt\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addSalt(String salt)\n {\n this.salt = Utils.fromCharSequenceToBytes(salt);\n return this;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param saltAsBytes cryptographic salt as bytes array\n * @return this builder\n * @since 1.7.0\n */\n public HashBuilder addSalt(byte[] saltAsBytes)\n {\n this.salt = saltAsBytes;\n return this;\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_2", "original_index": 2, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_2"}, "type": "Document"} +{"page_content": " /**\n * Add a random cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *
\n * Calling this method can be omitted for all the CHFs that require a salt.\n *\n * @return this builder\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt()\n {\n this.salt = SaltGenerator.generate();\n return this;\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_3", "original_index": 3, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_3"}, "type": "Document"} +{"page_content": " /**\n * Add a random cryptographic salt in the hashing process with a given length.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param length the length of the salt produced\n * @return this builder\n * @throws BadParametersException if the length is non-positive\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt(int length)\n {\n if (length <= 0)\n {\n throw new BadParametersException(\"Salt cannot have a non-positive length\");\n }\n else\n {\n this.salt = SaltGenerator.generate(length);\n }\n return this;\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_4", "original_index": 4, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_4"}, "type": "Document"} +{"page_content": " /**\n * Concatenates the pepper configured in your `psw4j.properties` file with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @return this builder\n * @see PepperGenerator#get()\n */\n public HashBuilder addPepper()\n {\n this.pepper = PepperGenerator.get();\n return this;\n }\n\n /**\n * Concatenates the provided string with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @param pepper cryptographic pepper\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n return this;\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_5", "original_index": 5, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_5"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *
\n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_6", "original_index": 6, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_6"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link PBKDF2Function}.\n *
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withPBKDF2()\n {\n return with(AlgorithmFinder.getPBKDF2Instance());\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_7", "original_index": 7, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_7"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link CompressedPBKDF2Function}.\n *
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getCompressedPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withCompressedPBKDF2()\n {\n return with(AlgorithmFinder.getCompressedPBKDF2Instance());\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_8", "original_index": 8, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_8"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link BcryptFunction}.\n *
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getBcryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withBcrypt()\n {\n return with(AlgorithmFinder.getBcryptInstance());\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_9", "original_index": 9, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_9"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link ScryptFunction}.\n *
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getScryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withScrypt()\n {\n return with(AlgorithmFinder.getScryptInstance());\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_10", "original_index": 10, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_10"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link MessageDigestFunction}.\n *
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.4.0\n */\n public Hash withMessageDigest()\n {\n return with(AlgorithmFinder.getMessageDigestInstance());\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_11", "original_index": 11, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_11"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link Argon2Function}.\n *
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.5.0\n */\n public Hash withArgon2()\n {\n return with(AlgorithmFinder.getArgon2Instance());\n }\n\n", "metadata": {"chunk_id": "doc_47_chunk_12", "original_index": 12, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_12"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link BalloonHashingFunction}.\n *
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.8.0\n */\n public Hash withBalloonHashing()\n {\n return with(AlgorithmFinder.getBalloonHashingInstance());\n }\n\n}\n", "metadata": {"chunk_id": "doc_47_chunk_13", "original_index": 13, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_13"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "metadata": {"chunk_id": "doc_48_chunk_0", "original_index": 0, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_0"}, "type": "Document"} +{"page_content": "import com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n", "metadata": {"chunk_id": "doc_48_chunk_1", "original_index": 1, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_1"}, "type": "Document"} +{"page_content": "\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n", "metadata": {"chunk_id": "doc_48_chunk_2", "original_index": 2, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_2"}, "type": "Document"} +{"page_content": " @Test\n public void testMDVariants()\n {\n Set algorithms = AlgorithmFinder.getAllMessageDigests();\n for (String alg : algorithms)\n {\n // GIVEN\n MessageDigestFunction strategy = MessageDigestFunction.getInstance(alg);\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n Hash hashWithSalt = strategy.hash(password, salt);\n\n // 
THEN\n Assert.assertTrue(strategy.check(password, hash.getResult()));\n Assert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n }\n }\n\n @Test(expected = UnsupportedOperationException.class)\n public void testMDWrongAlgorithm()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"notAnAlgorithm\");\n String password = \"password\";\n String salt = \"abc\";\n\n", "metadata": {"chunk_id": "doc_48_chunk_3", "original_index": 3, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_3"}, "type": "Document"} +{"page_content": " // WHEN\n strategy.hash(password, salt);\n\n // THEN\n }\n\n @Test\n public void testMDWrongSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"1234\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n }\n\n", "metadata": {"chunk_id": "doc_48_chunk_4", "original_index": 4, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_4"}, "type": "Document"} +{"page_content": " @Test\n public void testMDRightSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"prepend\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.PREPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n\n }\n\n\n @Test\n public void testPBKDF2Check()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n", "metadata": {"chunk_id": "doc_48_chunk_5", "original_index": 5, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_5"}, "type": "Document"} +{"page_content": " // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2WrongCheck2()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String badHash = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n try {\n Assert.assertTrue(strategy.check(userSubmittedPassword, badHash));\n } catch (BadParametersException ex) {\n assertEquals(\"`\" + badHash + \"` is not a valid hash\", ex.getMessage());\n }\n }\n\n", "metadata": {"chunk_id": "doc_48_chunk_6", "original_index": 6, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_6"}, "type": "Document"} +{"page_content": "\n @Test(expected = BadParametersException.class)\n public void testPBKDF2BadCheck()\n {\n // GIVEN\n String hashed = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n\n }\n\n @Test\n public void testAlgorithmFromCode()\n {\n // GIVEN\n\n // WHEN\n Hmac algNull = Hmac.fromCode(-100);\n for (Hmac enumAlg : Hmac.values())\n {\n Hmac alg = Hmac.fromCode(enumAlg.code());\n\n", "metadata": {"chunk_id": "doc_48_chunk_7", "original_index": 7, 
"pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_7"}, "type": "Document"} +{"page_content": "\n // THEN\n Assert.assertNotNull(alg);\n assertEquals(enumAlg.code(), alg.code());\n assertEquals(enumAlg.bits(), alg.bits());\n }\n Assert.assertNull(algNull);\n\n\n }\n\n @Test\n public void testPBKDF2Coherence()\n {\n // GIVEN\n String password = \"password\";\n\n // WHEN\n Hash hash = PBKDF2Function.getInstance(Hmac.SHA256, 8_777, 256).hash(password);\n\n // THEN\n Assert.assertTrue(Password.check(password, hash));\n\n }\n\n", "metadata": {"chunk_id": "doc_48_chunk_8", "original_index": 8, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_8"}, "type": "Document"} +{"page_content": " @Test\n public void testPBKDF2CheckWithFixedConfigurations()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = new CompressedPBKDF2Function(Hmac.SHA256, 10_000, 256);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2equality()\n {\n // GIVEN\n PBKDF2Function strategy1 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy2 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy3 = PBKDF2Function.getInstance(Hmac.SHA1, 10_000, 256);\n PBKDF2Function strategy4 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 256);\n PBKDF2Function strategy5 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 123);\n\n", "metadata": {"chunk_id": "doc_48_chunk_9", "original_index": 9, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_9"}, "type": "Document"} +{"page_content": "\n // WHEN\n Map map = new HashMap<>();\n map.put(strategy1, strategy1.toString());\n map.put(strategy2, strategy2.toString());\n map.put(strategy3, strategy3.toString());\n map.put(strategy4, strategy4.toString());\n map.put(strategy5, strategy5.toString());\n\n\n // THEN\n assertEquals(4, map.size());\n assertEquals(strategy1, strategy2);\n }\n\n @Test\n public void testCompressed()\n {\n Hmac algorithm = Hmac.SHA512;\n\n\n for (int i = 1; i <= 100; i++)\n {\n String password = PepperGenerator.generate(12);\n String salt = PepperGenerator.generate(i);\n Hash hash = CompressedPBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n Hash notCompressedHash = PBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n", "metadata": {"chunk_id": "doc_48_chunk_10", "original_index": 10, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_10"}, "type": "Document"} +{"page_content": " String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n 
SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n", "metadata": {"chunk_id": "doc_48_chunk_11", "original_index": 11, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_11"}, "type": "Document"} +{"page_content": " // THEN\n boolean eqNull = function.equals(null);\n boolean eqClass = function.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = function.equals(MessageDigestFunction.getInstance(a, o));\n boolean sameInst2 = function.equals(new MessageDigestFunction(a, o));\n String toString = function.toString();\n int hashCode = function.hashCode();\n boolean notSameInst1 = function.equals(new MessageDigestFunction(\"SHA1\", o));\n boolean notSameInst2 = function.equals(new MessageDigestFunction(a, SaltOption.PREPEND));\n\n", "metadata": {"chunk_id": "doc_48_chunk_12", "original_index": 12, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_12"}, "type": "Document"} +{"page_content": "\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new MessageDigestFunction(\"SHA1\", o).toString());\n Assert.assertNotEquals(hashCode, new MessageDigestFunction(a, SaltOption.PREPEND).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n }\n\n}\n", "metadata": {"chunk_id": "doc_48_chunk_13", "original_index": 13, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_13"}, "type": "Document"} +{"page_content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = \"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n", "metadata": {"chunk_id": "doc_49_chunk_0", "original_index": 0, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_0"}, "type": "Document"} +{"page_content": " boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9ΫI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n", "metadata": {"chunk_id": "doc_49_chunk_1", "original_index": 1, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_1"}, "type": "Document"} +{"page_content": "\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n", "metadata": {"chunk_id": "doc_49_chunk_2", "original_index": 2, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_2"}, "type": "Document"} 
+{"page_content": " (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n", "metadata": {"chunk_id": "doc_49_chunk_3", "original_index": 3, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_3"}, "type": "Document"} +{"page_content": " /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n", "metadata": {"chunk_id": "doc_49_chunk_4", "original_index": 4, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_4"}, "type": "Document"} +{"page_content": "\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n", "metadata": {"chunk_id": "doc_49_chunk_5", "original_index": 5, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_5"}, "type": "Document"} +{"page_content": " // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"’(っ^▿^)۶\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A٩(˘◡˘ ) ❌❌ ❌❌❌\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n", "metadata": {"chunk_id": "doc_49_chunk_6", "original_index": 6, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_6"}, "type": "Document"} +{"page_content": " Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"ŸŁĀPRČ\")\n .addSalt(\"ŸŁĀPRČAA\")\n .withArgon2()\n .getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n", "metadata": {"chunk_id": "doc_49_chunk_7", "original_index": 7, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_7"}, "type": "Document"} +{"page_content": " private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n", "metadata": {"chunk_id": "doc_49_chunk_8", "original_index": 8, "pid": 
"36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_8"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2021 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j.types;\n\n", "metadata": {"chunk_id": "doc_50_chunk_0", "original_index": 0, "pid": "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e_0"}, "type": "Document"} +{"page_content": "/**\n * Enum containing the different variations of Argon2.\n *\n * @author David Bertoldi\n * @see Argon2\n * @since 1.5.0\n */\npublic enum Argon2\n{\n /**\n * It maximizes resistance to GPU cracking attacks.\n * It accesses the memory array in a password dependent order, which reduces the possibility of time–memory trade-off (TMTO) attacks,\n * but introduces possible side-channel attacks\n */\n D,\n\n /**\n * It is optimized to resist side-channel attacks. It accesses the memory array in a password independent order.\n */\n I,\n\n /**\n * It is a hybrid version. It follows the Argon2i approach for the first half pass over memory and the Argon2d approach for subsequent passes.\n * It is recommended to use Argon2id except when there are reasons to prefer one of the other two modes.\n */\n ID;\n\n}\n", "metadata": {"chunk_id": "doc_50_chunk_1", "original_index": 1, "pid": "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e_1"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/plugins/wasi_crypto/symmetric/tag.h - Symmetric Tag class ===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the Symmetric Tag definition.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n", "metadata": {"chunk_id": "doc_51_chunk_0", "original_index": 0, "pid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298_0"}, "type": "Document"} +{"page_content": "#include \"utils/error.h\"\n#include \"utils/secret_vec.h\"\n\n#include \"common/span.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace Symmetric {\n\n/// Authentication tag, that can be verified without channels using the provided\n/// APIs. 
Very small and no streaming.\n///\n/// More detail:\n/// https://github.com/WebAssembly/wasi-crypto/blob/main/docs/wasi-crypto.md#authentication-tags\nclass Tag {\npublic:\n Tag(Tag &&Data) noexcept = default;\n Tag &operator=(Tag &&Data) noexcept = default;\n Tag(const Tag &Data) noexcept = delete;\n Tag &operator=(const Tag &Data) noexcept = delete;\n\n", "metadata": {"chunk_id": "doc_51_chunk_1", "original_index": 1, "pid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298_1"}, "type": "Document"} +{"page_content": " Tag(SecretVec &&Data) noexcept : Data(std::move(Data)) {}\n\n size_t len() const noexcept { return Data.size(); }\n\n /// The function MUST return `__WASI_CRYPTO_ERRNO_INVALID_TAG` if the\n /// tags don't match.\n WasiCryptoExpect verify(Span RawTag) const noexcept;\n\n WasiCryptoExpect pull(Span Raw) const noexcept;\n\nprivate:\n SecretVec Data;\n};\n\n} // namespace Symmetric\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", "metadata": {"chunk_id": "doc_51_chunk_2", "original_index": 2, "pid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298_2"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"loader/serialize.h\"\n\n#include \n#include \n#include \n\nnamespace {\n\nWasmEdge::Configure Conf;\nWasmEdge::Loader::Serializer Ser(Conf);\n\nWasmEdge::AST::CodeSection\ncreateCodeSec(std::vector Instructions) {\n WasmEdge::AST::CodeSection CodeSec;\n WasmEdge::AST::CodeSegment CodeSeg;\n WasmEdge::AST::Expression Expr;\n Expr.getInstrs() = Instructions;\n CodeSeg.getExpr() = Expr;\n CodeSec.getContent().push_back(CodeSeg);\n return CodeSec;\n}\n\n", "metadata": {"chunk_id": "doc_52_chunk_0", "original_index": 0, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_0"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeBlockControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 1. Test block control instructions.\n //\n // 1. Serialize block with only end operation.\n // 2. Serialize loop with only end operation.\n // 3. Serialize block with instructions.\n // 4. 
Serialize loop with instructions.\n\n WasmEdge::AST::Instruction Block(WasmEdge::OpCode::Block);\n WasmEdge::AST::Instruction Loop(WasmEdge::OpCode::Loop);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n", "metadata": {"chunk_id": "doc_52_chunk_1", "original_index": 1, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_1"}, "type": "Document"} +{"page_content": " Block.setEmptyBlockType();\n Instructions = {Block, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_2", "original_index": 2, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_2"}, "type": "Document"} +{"page_content": " Loop.setEmptyBlockType();\n Instructions = {Loop, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_3", "original_index": 3, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_3"}, "type": "Document"} +{"page_content": " Loop.setEmptyBlockType();\n Instructions = {Block, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_4", "original_index": 4, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_4"}, "type": "Document"} +{"page_content": " Loop.setEmptyBlockType();\n Instructions = {Loop, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "metadata": {"chunk_id": "doc_52_chunk_5", "original_index": 5, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_5"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeIfElseControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 2. Test serialize if-else control instruction.\n //\n // 1. Serialize if statement with only end operation.\n // 2. 
Serialize if and else statements with only end operation.\n // 3. Serialize if statement with instructions.\n // 4. Serialize if and else statements with instructions.\n\n", "metadata": {"chunk_id": "doc_52_chunk_6", "original_index": 6, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_6"}, "type": "Document"} +{"page_content": " WasmEdge::AST::Instruction If(WasmEdge::OpCode::If);\n WasmEdge::AST::Instruction Else(WasmEdge::OpCode::Else);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n If.setEmptyBlockType();\n Instructions = {If, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_7", "original_index": 7, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_7"}, "type": "Document"} +{"page_content": " If.setEmptyBlockType();\n Instructions = {If, Else, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x05U, // OpCode Else\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_8", "original_index": 8, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_8"}, "type": "Document"} +{"page_content": " If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_9", "original_index": 9, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_9"}, "type": "Document"} +{"page_content": " If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, Else,\n I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0EU, // Content size = 14\n 0x01U, // Vector length = 1\n 0x0CU, // Code segment size = 12\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x05U, // OpCode Else\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in else statement.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "metadata": {"chunk_id": "doc_52_chunk_10", "original_index": 10, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_10"}, "type": "Document"} +{"page_content": 
"TEST(SerializeInstructionTest, SerializeBrControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 3. Test branch control instructions.\n //\n // 1. Serialize valid label index.\n\n WasmEdge::AST::Instruction Br(WasmEdge::OpCode::Br);\n WasmEdge::AST::Instruction BrIf(WasmEdge::OpCode::Br_if);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "metadata": {"chunk_id": "doc_52_chunk_11", "original_index": 11, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_11"}, "type": "Document"} +{"page_content": " Br.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {Br, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x0CU, // OpCode Br.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_12", "original_index": 12, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_12"}, "type": "Document"} +{"page_content": " BrIf.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrIf, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected[5] = 0x0DU; // OpCode Br_if.\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeBrTableControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 4. Test branch table control instruction.\n //\n // 1. Serialize instruction with empty label vector.\n // 2. Serialize instruction with label vector.\n\n WasmEdge::AST::Instruction BrTable(WasmEdge::OpCode::Br_table);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "metadata": {"chunk_id": "doc_52_chunk_13", "original_index": 13, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_13"}, "type": "Document"} +{"page_content": " BrTable.setLabelListSize(1);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x0EU, // OpCode Br_table.\n 0x00U, // Vector length = 0\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_14", "original_index": 14, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_14"}, "type": "Document"} +{"page_content": " BrTable.setLabelListSize(4);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFF1U;\n BrTable.getLabelList()[1].TargetIndex = 0xFFFFFFF2U;\n BrTable.getLabelList()[2].TargetIndex = 0xFFFFFFF3U;\n BrTable.getLabelList()[3].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n", "metadata": {"chunk_id": "doc_52_chunk_15", "original_index": 15, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_15"}, "type": "Document"} +{"page_content": " 0x1AU, // Content size = 26\n 0x01U, // Vector length = 1\n 0x18U, // Code segment size = 24\n 0x00U, // Local vec(0)\n 0x0EU, // 
OpCode Br_table.\n 0x03U, // Vector length = 3\n 0xF1U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[0]\n 0xF2U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[1]\n 0xF3U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[2]\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "metadata": {"chunk_id": "doc_52_chunk_16", "original_index": 16, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_16"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeCallControlInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 5. Test call control instructions.\n //\n // 1. Serialize call instruction with valid type index.\n // 2. Serialize call_indirect instruction with valid type and table index.\n // 3. Serialize call_indirect instruction with invalid table index without\n // Ref-Types proposal.\n\n WasmEdge::AST::Instruction Call(WasmEdge::OpCode::Call);\n WasmEdge::AST::Instruction CallIndirect(WasmEdge::OpCode::Call_indirect);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "metadata": {"chunk_id": "doc_52_chunk_17", "original_index": 17, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_17"}, "type": "Document"} +{"page_content": " Call.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {Call, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x10U, // OpCode Call.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Function type index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_18", "original_index": 18, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_18"}, "type": "Document"} +{"page_content": " CallIndirect.getTargetIndex() = 0xFFFFFFFFU;\n CallIndirect.getSourceIndex() = 0x05U;\n Instructions = {CallIndirect, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x11U, // OpCode Call_indirect.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Type index.\n 0x05U, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_19", "original_index": 19, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_19"}, "type": "Document"} +{"page_content": " EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeReferenceInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 6. Test reference instructions.\n //\n // 1. Serialize function reference type.\n // 2. 
Serialize invalid reference type without Ref-Types proposal.\n\n WasmEdge::AST::Instruction RefNull(WasmEdge::OpCode::Ref__null);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "metadata": {"chunk_id": "doc_52_chunk_20", "original_index": 20, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_20"}, "type": "Document"} +{"page_content": " RefNull.setValType(WasmEdge::TypeCode::FuncRef);\n Instructions = {RefNull, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0xD0U, // OpCode Ref__null.\n 0x70U, // FuncRef\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_21", "original_index": 21, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_21"}, "type": "Document"} +{"page_content": " RefNull.setValType(WasmEdge::TypeCode::ExternRef);\n Instructions = {RefNull, End};\n EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeParametricInstruction) {\n WasmEdge::Configure ConfNoSIMD;\n ConfNoSIMD.removeProposal(WasmEdge::Proposal::SIMD);\n WasmEdge::Loader::Serializer SerNoSIMD(ConfNoSIMD);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 7. Test parametric instructions.\n //\n // 1. Serialize valid select_t instruction with value type list.\n // 2. Serialize invalid value type list without SIMD proposal.\n\n WasmEdge::AST::Instruction SelectT(WasmEdge::OpCode::Select_t);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "metadata": {"chunk_id": "doc_52_chunk_22", "original_index": 22, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_22"}, "type": "Document"} +{"page_content": " SelectT.setValTypeListSize(2);\n SelectT.getValTypeList()[0] = WasmEdge::TypeCode::I32;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::I64;\n Instructions = {SelectT, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x1CU, // OpCode Select_t.\n 0x02U, // Vector length = 2\n 0x7FU, 0x7EU, // Value types\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_23", "original_index": 23, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_23"}, "type": "Document"} +{"page_content": " SelectT.getValTypeList()[0] = WasmEdge::TypeCode::V128;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::V128;\n Instructions = {SelectT, End};\n EXPECT_FALSE(SerNoSIMD.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeVariableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 8. Test variable instructions.\n //\n // 1. 
Serialize valid local or global index.\n\n WasmEdge::AST::Instruction LocalGet(WasmEdge::OpCode::Local__get);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "metadata": {"chunk_id": "doc_52_chunk_24", "original_index": 24, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_24"}, "type": "Document"} +{"page_content": " LocalGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {LocalGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x20U, // OpCode Local__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Local index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "metadata": {"chunk_id": "doc_52_chunk_25", "original_index": 25, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_25"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeTableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 9. Test table instructions.\n //\n // 1. Serialize table_get instruction.\n // 2. Serialize table_init instruction.\n\n WasmEdge::AST::Instruction TableGet(WasmEdge::OpCode::Table__get);\n WasmEdge::AST::Instruction TableInit(WasmEdge::OpCode::Table__init);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "metadata": {"chunk_id": "doc_52_chunk_26", "original_index": 26, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_26"}, "type": "Document"} +{"page_content": " TableGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x25U, // OpCode Table__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_27", "original_index": 27, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_27"}, "type": "Document"} +{"page_content": " TableInit.getSourceIndex() = 0x05U;\n TableInit.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableInit, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0CU, // Content size = 12\n 0x01U, // Vector length = 1\n 0x0AU, // Code segment size = 10\n 0x00U, // Local vec(0)\n 0xFCU, 0x0CU, // OpCode Table__init.\n 0x05U, // Element idx.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "metadata": {"chunk_id": "doc_52_chunk_28", "original_index": 28, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_28"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeMemoryInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 10. Test memory instructions.\n //\n // 1. Serialize memory_grow instruction.\n // 2. 
Serialize i32_load instruction.\n\n WasmEdge::AST::Instruction MemoryGrow(WasmEdge::OpCode::Memory__grow);\n WasmEdge::AST::Instruction I32Load(WasmEdge::OpCode::I32__load);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n Instructions = {MemoryGrow, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0x40U, // OpCode Memory__grow.\n 0x00U, // Checking byte\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_29", "original_index": 29, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_29"}, "type": "Document"} +{"page_content": " I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_30", "original_index": 30, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_30"}, "type": "Document"} +{"page_content": " I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "metadata": {"chunk_id": "doc_52_chunk_31", "original_index": 31, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_31"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeConstInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 11. Test const numeric instructions.\n //\n // 1. Serialize I32 const numeric instruction.\n // 2. Serialize I64 const numeric instruction.\n // 3. Serialize F32 const numeric instruction.\n // 4. 
Serialize F64 const numeric instruction.\n\n WasmEdge::AST::Instruction I32Const(WasmEdge::OpCode::I32__const);\n WasmEdge::AST::Instruction I64Const(WasmEdge::OpCode::I64__const);\n WasmEdge::AST::Instruction F32Const(WasmEdge::OpCode::F32__const);\n WasmEdge::AST::Instruction F64Const(WasmEdge::OpCode::F64__const);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "metadata": {"chunk_id": "doc_52_chunk_32", "original_index": 32, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_32"}, "type": "Document"} +{"page_content": " I32Const.setNum(-123456);\n Instructions = {I32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x41U, // OpCode I32__const.\n 0xC0U, 0xBBU, 0x78U, // I32 -123456.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_33", "original_index": 33, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_33"}, "type": "Document"} +{"page_content": " I64Const.setNum(static_cast(-112233445566L));\n Instructions = {I64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x42U, // OpCode I64__const.\n 0xC2U, 0x8EU, 0xF6U, 0xF2U, 0xDDU, 0x7CU, // I64 -112233445566\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_34", "original_index": 34, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_34"}, "type": "Document"} +{"page_content": " F32Const.setNum(static_cast(-0x1.921fb4p+1)); // -3.1415926F\n Instructions = {F32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x09U, // Content size = 9\n 0x01U, // Vector length = 1\n 0x07U, // Code segment size = 7\n 0x00U, // Local vec(0)\n 0x43U, // OpCode F32__const.\n 0xDAU, 0x0FU, 0x49U, 0xC0U, // F32 -3.1415926\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "metadata": {"chunk_id": "doc_52_chunk_35", "original_index": 35, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_35"}, "type": "Document"} +{"page_content": " F64Const.setNum(-3.1415926535897932);\n Instructions = {F64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0DU, // Content size = 13\n 0x01U, // Vector length = 1\n 0x0BU, // Code segment size = 11\n 0x00U, // Local vec(0)\n 0x44U, // OpCode F64__const.\n 0x18U, 0x2DU, 0x44U, 0x54U,\n 0xFBU, 0x21U, 0x09U, 0xC0U, // F64 -3.1415926535897932\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n} // namespace\n", "metadata": {"chunk_id": "doc_52_chunk_36", "original_index": 36, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_36"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiLoggingMock {\n\nusing namespace std::literals;\n\nclass Log : public Runtime::HostFunction 
{\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"wasi-logging\"sv);\n return Unexpect(ErrCode::Value::HostFuncError);\n }\n};\n\n} // namespace WasiLoggingMock\n} // namespace Host\n} // namespace WasmEdge", "metadata": {"chunk_id": "doc_53_chunk_0", "original_index": 0, "pid": "adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823_0"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"common/defines.h\"\n#if WASMEDGE_OS_MACOS\n\n#include \"common/errcode.h\"\n#include \"host/wasi/environ.h\"\n#include \"macos.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WASI {\n\nWasiExpect Environ::procRaise(__wasi_signal_t Signal) const noexcept {\n int SysSignal;\n switch (Signal) {\n case __WASI_SIGNAL_NONE:\n SysSignal = 0;\n break;\n case __WASI_SIGNAL_HUP:\n SysSignal = SIGHUP;\n break;\n case __WASI_SIGNAL_INT:\n SysSignal = SIGINT;\n break;\n case __WASI_SIGNAL_QUIT:\n SysSignal = SIGQUIT;\n break;\n case __WASI_SIGNAL_ILL:\n SysSignal = SIGILL;\n break;\n case __WASI_SIGNAL_TRAP:\n SysSignal = SIGTRAP;\n break;\n case __WASI_SIGNAL_ABRT:\n SysSignal = SIGABRT;\n break;\n case __WASI_SIGNAL_BUS:\n SysSignal = SIGBUS;\n break;\n case __WASI_SIGNAL_FPE:\n SysSignal = SIGFPE;\n break;\n", "metadata": {"chunk_id": "doc_54_chunk_0", "original_index": 0, "pid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84_0"}, "type": "Document"} +{"page_content": " case __WASI_SIGNAL_KILL:\n SysSignal = SIGKILL;\n break;\n case __WASI_SIGNAL_USR1:\n SysSignal = SIGUSR1;\n break;\n case __WASI_SIGNAL_SEGV:\n SysSignal = SIGSEGV;\n break;\n case __WASI_SIGNAL_USR2:\n SysSignal = SIGUSR2;\n break;\n case __WASI_SIGNAL_PIPE:\n SysSignal = SIGPIPE;\n break;\n case __WASI_SIGNAL_ALRM:\n SysSignal = SIGALRM;\n break;\n case __WASI_SIGNAL_TERM:\n SysSignal = SIGTERM;\n break;\n case __WASI_SIGNAL_CHLD:\n SysSignal = SIGCHLD;\n break;\n case __WASI_SIGNAL_CONT:\n SysSignal = SIGCONT;\n break;\n case __WASI_SIGNAL_STOP:\n SysSignal = SIGSTOP;\n break;\n case __WASI_SIGNAL_TSTP:\n SysSignal = SIGTSTP;\n break;\n case __WASI_SIGNAL_TTIN:\n SysSignal = SIGTTIN;\n break;\n case __WASI_SIGNAL_TTOU:\n SysSignal = SIGTTOU;\n break;\n case __WASI_SIGNAL_URG:\n SysSignal = SIGURG;\n break;\n case __WASI_SIGNAL_XCPU:\n SysSignal = SIGXCPU;\n break;\n", "metadata": {"chunk_id": "doc_54_chunk_1", "original_index": 1, "pid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84_1"}, "type": "Document"} +{"page_content": " case __WASI_SIGNAL_XFSZ:\n SysSignal = SIGXFSZ;\n break;\n case __WASI_SIGNAL_VTALRM:\n SysSignal = SIGVTALRM;\n break;\n case __WASI_SIGNAL_PROF:\n SysSignal = SIGPROF;\n break;\n case __WASI_SIGNAL_WINCH:\n SysSignal = SIGWINCH;\n break;\n case __WASI_SIGNAL_SYS:\n SysSignal = SIGSYS;\n break;\n case __WASI_SIGNAL_POLL:\n case __WASI_SIGNAL_PWR:\n default:\n return WasiUnexpect(__WASI_ERRNO_NOTSUP);\n }\n if (auto Res = std::raise(SysSignal); Res != 0) {\n return WasiUnexpect(fromErrNo(errno));\n }\n return {};\n}\n\nWasiExpect Environ::schedYield() const noexcept {\n ::sched_yield();\n return {};\n}\n\n} // namespace WASI\n} // namespace Host\n} // namespace WasmEdge\n\n#endif\n", "metadata": {"chunk_id": "doc_54_chunk_2", "original_index": 2, "pid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84_2"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: 
Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCryptoMock {\n\nusing namespace std::literals;\nstatic inline constexpr const uint32_t kWASICryptoError = 1U;\n\nnamespace Common {\nclass ArrayOutputLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass ArrayOutputPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_0", "original_index": 0, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_0"}, "type": "Document"} +{"page_content": "class OptionsOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_1", "original_index": 1, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_1"}, "type": "Document"} +{"page_content": "class OptionsSet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_2", "original_index": 2, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_2"}, "type": "Document"} +{"page_content": "class OptionsSetGuestBuffer\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_3", "original_index": 3, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_3"}, "type": "Document"} +{"page_content": "class SecretsManagerInvalidate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Common\n\nnamespace AsymmetricCommon {\nclass KeypairGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n 
printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_4", "original_index": 4, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_4"}, "type": "Document"} +{"page_content": "class KeypairGenerateManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_5", "original_index": 5, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_5"}, "type": "Document"} +{"page_content": "class KeypairReplaceManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_6", "original_index": 6, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_6"}, "type": "Document"} +{"page_content": "class KeypairFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairFromPkAndSk : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_7", "original_index": 7, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_7"}, "type": "Document"} +{"page_content": "class KeypairExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairPublickey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_8", "original_index": 8, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_8"}, "type": "Document"} +{"page_content": "class KeypairSecretkey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyImport : public Runtime::HostFunction {\npublic:\n 
Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_9", "original_index": 9, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_9"}, "type": "Document"} +{"page_content": "class PublickeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_10", "original_index": 10, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_10"}, "type": "Document"} +{"page_content": "class PublickeyFromSecretkey\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_11", "original_index": 11, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_11"}, "type": "Document"} +{"page_content": "class SecretkeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_12", "original_index": 12, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_12"}, "type": "Document"} +{"page_content": "class SecretkeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace AsymmetricCommon\n\nnamespace Kx {\nclass Dh : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_13", "original_index": 13, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_13"}, "type": "Document"} +{"page_content": "class Encapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Decapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Kx\n\n", "metadata": {"chunk_id": "doc_55_chunk_14", "original_index": 14, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_14"}, "type": "Document"} +{"page_content": "namespace Signatures {\nclass Export 
: public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Import : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_15", "original_index": 15, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_15"}, "type": "Document"} +{"page_content": "class StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateUpdate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_16", "original_index": 16, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_16"}, "type": "Document"} +{"page_content": "class StateSign : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_17", "original_index": 17, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_17"}, "type": "Document"} +{"page_content": "class VerificationStateOpen\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateUpdate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_18", "original_index": 18, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_18"}, "type": "Document"} +{"page_content": "class VerificationStateVerify\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateClose\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Close : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n} // namespace Signatures\n\n", "metadata": {"chunk_id": "doc_55_chunk_19", "original_index": 19, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_19"}, "type": "Document"} +{"page_content": "namespace Symmetric {\nclass KeyGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyImport : public Runtime::HostFunction 
{\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_20", "original_index": 20, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_20"}, "type": "Document"} +{"page_content": "class KeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyGenerateManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_21", "original_index": 21, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_21"}, "type": "Document"} +{"page_content": "class KeyStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyReplaceManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_22", "original_index": 22, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_22"}, "type": "Document"} +{"page_content": "class KeyId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_23", "original_index": 23, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_23"}, "type": "Document"} +{"page_content": "class StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClone : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_24", "original_index": 24, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_24"}, "type": "Document"} +{"page_content": "class StateOptionsGet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOptionsGetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return 
kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_25", "original_index": 25, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_25"}, "type": "Document"} +{"page_content": "class StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateAbsorb : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_26", "original_index": 26, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_26"}, "type": "Document"} +{"page_content": "class StateSqueeze : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueezeTag : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_27", "original_index": 27, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_27"}, "type": "Document"} +{"page_content": "class StateSqueezeKey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateMaxTagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_28", "original_index": 28, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_28"}, "type": "Document"} +{"page_content": "class StateEncrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateEncryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_29", "original_index": 29, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_29"}, "type": "Document"} +{"page_content": "class StateDecrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateDecryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_30", "original_index": 30, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_30"}, "type": "Document"} +{"page_content": "class StateRatchet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n 
printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "metadata": {"chunk_id": "doc_55_chunk_31", "original_index": 31, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_31"}, "type": "Document"} +{"page_content": "class TagPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Symmetric\n\n} // namespace WasiCryptoMock\n} // namespace Host\n} // namespace WasmEdge\n", "metadata": {"chunk_id": "doc_55_chunk_32", "original_index": 32, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_32"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/ast/module.h - Module class definition -------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the declaration of the Module node class, which is the\n/// module node in AST.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"ast/section.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace AST {\n\n/// AST Module node.\nclass Module {\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n", "metadata": {"chunk_id": "doc_56_chunk_0", "original_index": 0, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_0"}, "type": "Document"} +{"page_content": " /// Getters of references to sections.\n Span getCustomSections() const noexcept {\n return CustomSecs;\n }\n std::vector &getCustomSections() noexcept {\n return CustomSecs;\n }\n const TypeSection &getTypeSection() const { return TypeSec; }\n TypeSection &getTypeSection() { return TypeSec; }\n const ImportSection &getImportSection() const { return ImportSec; }\n ImportSection &getImportSection() { return ImportSec; }\n const FunctionSection &getFunctionSection() const { return FunctionSec; }\n", "metadata": {"chunk_id": "doc_56_chunk_1", "original_index": 1, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_1"}, "type": "Document"} +{"page_content": " FunctionSection &getFunctionSection() { return FunctionSec; }\n const TableSection &getTableSection() const { return TableSec; }\n TableSection &getTableSection() { return TableSec; }\n const MemorySection &getMemorySection() const { return MemorySec; }\n MemorySection &getMemorySection() { return MemorySec; }\n const GlobalSection 
&getGlobalSection() const { return GlobalSec; }\n GlobalSection &getGlobalSection() { return GlobalSec; }\n const ExportSection &getExportSection() const { return ExportSec; }\n ExportSection &getExportSection() { return ExportSec; }\n const StartSection &getStartSection() const { return StartSec; }\n StartSection &getStartSection() { return StartSec; }\n const ElementSection &getElementSection() const { return ElementSec; }\n ElementSection &getElementSection() { return ElementSec; }\n const CodeSection &getCodeSection() const { return CodeSec; }\n", "metadata": {"chunk_id": "doc_56_chunk_2", "original_index": 2, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_2"}, "type": "Document"} +{"page_content": " CodeSection &getCodeSection() { return CodeSec; }\n const DataSection &getDataSection() const { return DataSec; }\n DataSection &getDataSection() { return DataSec; }\n const DataCountSection &getDataCountSection() const { return DataCountSec; }\n DataCountSection &getDataCountSection() { return DataCountSec; }\n const AOTSection &getAOTSection() const { return AOTSec; }\n AOTSection &getAOTSection() { return AOTSec; }\n\n /// Getter and setter of compiled symbol.\n const auto &getSymbol() const noexcept { return IntrSymbol; }\n void setSymbol(Symbol S) noexcept {\n IntrSymbol = std::move(S);\n }\n\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; }\n void setIsValidated(bool V = true) noexcept { IsValidated = V; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n /// @}\n\n", "metadata": {"chunk_id": "doc_56_chunk_3", "original_index": 3, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_3"}, "type": "Document"} +{"page_content": " /// \\name Section nodes of Module node.\n /// @{\n std::vector CustomSecs;\n TypeSection TypeSec;\n ImportSection ImportSec;\n FunctionSection FunctionSec;\n TableSection TableSec;\n MemorySection MemorySec;\n GlobalSection GlobalSec;\n ExportSection ExportSec;\n StartSection StartSec;\n ElementSection ElementSec;\n CodeSection CodeSec;\n DataSection DataSec;\n DataCountSection DataCountSec;\n /// @}\n\n /// \\name Data of AOT.\n /// @{\n AOTSection AOTSec;\n Symbol IntrSymbol;\n /// @}\n\n /// \\name Validated flag.\n /// @{\n bool IsValidated = false;\n /// @}\n};\n\nclass CoreModuleSection : public Section {\npublic:\n /// Getter of content.\n const Module &getContent() const noexcept { return Content; }\n Module &getContent() noexcept { return Content; }\n\nprivate:\n Module Content;\n};\n\nnamespace Component {\n\n", "metadata": {"chunk_id": "doc_56_chunk_4", "original_index": 4, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_4"}, "type": "Document"} +{"page_content": "class Component {\n using Section =\n std::variant;\n\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n", "metadata": {"chunk_id": "doc_56_chunk_5", "original_index": 5, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_5"}, "type": "Document"} +{"page_content": " /// Getter of layer vector.\n const std::vector &getLayer() const noexcept { return Layer; }\n std::vector &getLayer() noexcept { return Layer; }\n\n std::vector
<Section>
&getSections() noexcept { return Secs; }\n Span getSections() const noexcept { return Secs; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n std::vector Layer;\n\n std::vector
Secs;\n /// @}\n};\n\n} // namespace Component\n\n} // namespace AST\n} // namespace WasmEdge\n", "metadata": {"chunk_id": "doc_56_chunk_6", "original_index": 6, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_6"}, "type": "Document"} +{"page_content": "#pragma once\n#include \"avutil_base.h\"\n\n#include \"runtime/callingframe.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVUtil {\n\nclass AVLogSetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t LogLevelId);\n};\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVLogSetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t FlagsId);\n};\n\n", "metadata": {"chunk_id": "doc_57_chunk_0", "original_index": 0, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_0"}, "type": "Document"} +{"page_content": "class AVLogGetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n// Option funcs.\nclass AVOptSetBin : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetBin(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSet : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSet(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "metadata": {"chunk_id": "doc_57_chunk_1", "original_index": 1, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_1"}, "type": "Document"} +{"page_content": "class AVOptSetDouble : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetDouble(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetImageSize : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetImageSize(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "metadata": {"chunk_id": "doc_57_chunk_2", "original_index": 2, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_2"}, "type": "Document"} +{"page_content": "class AVOptSetPixelFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetPixelFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetSampleFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetSampleFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "metadata": {"chunk_id": "doc_57_chunk_3", "original_index": 3, "pid": 
"21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_3"}, "type": "Document"} +{"page_content": "class AVOptSetChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVRescaleQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, int32_t CDen);\n};\n\nclass AVRescaleQRnd : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQRnd(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &, int64_t A, int32_t BNum,\n int32_t BDen, int32_t CNum, int32_t CDen,\n int32_t RoundingId);\n};\n\n", "metadata": {"chunk_id": "doc_57_chunk_4", "original_index": 4, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_4"}, "type": "Document"} +{"page_content": "class AVUtilVersion : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilVersion(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &);\n};\n\nclass AVGetChannelLayoutNbChannels\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNbChannels(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutNameLen\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNameLen(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n", "metadata": {"chunk_id": "doc_57_chunk_5", "original_index": 5, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_5"}, "type": "Document"} +{"page_content": "class AVGetChannelLayoutName\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutName(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId, uint32_t NamePtr,\n uint32_t NameLen);\n};\n\nclass AVGetChannelLayoutMask\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutMask(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n", "metadata": {"chunk_id": "doc_57_chunk_6", "original_index": 6, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_6"}, "type": "Document"} +{"page_content": "class AVGetDefaultChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetDefaultChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n int32_t ChannelLayoutId);\n};\n\nclass AVUtilConfigurationLength\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfigurationLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "metadata": {"chunk_id": "doc_57_chunk_7", "original_index": 7, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_7"}, "type": "Document"} +{"page_content": "class AVUtilConfiguration : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfiguration(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t ConfigPtr,\n uint32_t 
ConfigLen);\n};\n\nclass AVUtilLicenseLength : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicenseLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilLicense : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicense(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t LicensePtr,\n uint32_t LicenseLen);\n};\n\n} // namespace AVUtil\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "metadata": {"chunk_id": "doc_57_chunk_8", "original_index": 8, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_8"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n", "metadata": {"chunk_id": "doc_58_chunk_0", "original_index": 0, "pid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_0"}, "type": "Document"} +{"page_content": " Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "metadata": {"chunk_id": "doc_58_chunk_1", "original_index": 1, "pid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_1"}, "type": "Document"} +{"page_content": "#include \"avChapter.h\"\n\nextern \"C\" {\n#include \"libavformat/avformat.h\"\n}\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVFormat {\n\nExpect AVChapterId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId, uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->id;\n}\n\nExpect AVChapterSetId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t ChapterId) {\n\n", "metadata": {"chunk_id": "doc_59_chunk_0", "original_index": 0, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_0"}, "type": "Document"} +{"page_content": " FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->id = ChapterId;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterTimebase::body(const Runtime::CallingFrame &Frame,\n uint32_t NumPtr, uint32_t DenPtr,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n 
MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(Num, MemInst, int32_t, NumPtr, \"\");\n MEM_PTR_CHECK(Den, MemInst, int32_t, DenPtr, \"\");\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n", "metadata": {"chunk_id": "doc_59_chunk_1", "original_index": 1, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_1"}, "type": "Document"} +{"page_content": " AVRational const AvRational = static_cast(*AvChapter)->time_base;\n *Num = AvRational.num;\n *Den = AvRational.den;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetTimebase::body(const Runtime::CallingFrame &,\n int32_t Num, int32_t Den,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVRational const Timebase = av_make_q(Num, Den);\n\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->time_base = Timebase;\n return static_cast(ErrNo::Success);\n}\n\n", "metadata": {"chunk_id": "doc_59_chunk_2", "original_index": 2, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_2"}, "type": "Document"} +{"page_content": "Expect AVChapterStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->start;\n}\n\n", "metadata": {"chunk_id": "doc_59_chunk_3", "original_index": 3, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_3"}, "type": "Document"} +{"page_content": "Expect AVChapterSetStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n int64_t StartValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->start = StartValue;\n return static_cast(ErrNo::Success);\n}\n\n", "metadata": {"chunk_id": "doc_59_chunk_4", "original_index": 4, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_4"}, "type": "Document"} +{"page_content": "Expect AVChapterEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->end;\n}\n\n", "metadata": {"chunk_id": "doc_59_chunk_5", "original_index": 5, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_5"}, "type": "Document"} +{"page_content": "Expect AVChapterSetEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t EndValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw 
Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->end = EndValue;\n return static_cast(ErrNo::Success);\n}\n\n", "metadata": {"chunk_id": "doc_59_chunk_6", "original_index": 6, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_6"}, "type": "Document"} +{"page_content": "Expect AVChapterMetadata::body(const Runtime::CallingFrame &Frame,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, uint32_t DictPtr) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(DictId, MemInst, uint32_t, DictPtr,\n \"Failed when accessing the return AVDictionary memory\"sv);\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n\n AVDictionary **AvDictionary =\n static_cast(av_malloc(sizeof(AVDictionary *)));\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n", "metadata": {"chunk_id": "doc_59_chunk_7", "original_index": 7, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_7"}, "type": "Document"} +{"page_content": " // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n *AvDictionary = (*AvChapter)->metadata;\n FFMPEG_PTR_STORE(AvDictionary, DictId);\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetMetadata::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n uint32_t DictId) {\n\n", "metadata": {"chunk_id": "doc_59_chunk_8", "original_index": 8, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_8"}, "type": "Document"} +{"page_content": " FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n FFMPEG_PTR_FETCH(AvDictionary, DictId, AVDictionary *);\n\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n if (AvDictionary == nullptr)\n (*AvChapter)->metadata = nullptr;\n else\n (*AvChapter)->metadata = *AvDictionary;\n return static_cast(ErrNo::Success);\n}\n\n} // namespace AVFormat\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "metadata": {"chunk_id": "doc_59_chunk_9", "original_index": 9, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_9"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"asymmetric_common/publickey.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace AsymmetricCommon {\n\nWasiCryptoExpect\nimportPk(AsymmetricCommon::Algorithm Alg, Span Encoded,\n __wasi_publickey_encoding_e_t Encoding) noexcept {\n return std::visit(\n [=](auto Factory) noexcept -> WasiCryptoExpect {\n return decltype(Factory)::PublicKey::import(Encoded, Encoding);\n },\n Alg);\n}\n\n", "metadata": {"chunk_id": "doc_60_chunk_0", "original_index": 0, "pid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26_0"}, "type": "Document"} +{"page_content": "WasiCryptoExpect>\npkExportData(const PkVariant &PkVariant,\n __wasi_publickey_encoding_e_t Encoding) noexcept {\n return std::visit(\n [Encoding](const auto &Pk) noexcept { return Pk.exportData(Encoding); },\n PkVariant);\n}\n\nWasiCryptoExpect pkVerify(const PkVariant &PkVariant) noexcept {\n return std::visit([](const auto &Pk) noexcept { return Pk.verify(); },\n PkVariant);\n}\n\n} // namespace AsymmetricCommon\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", "metadata": 
{"chunk_id": "doc_60_chunk_1", "original_index": 1, "pid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26_1"}, "type": "Document"} +{"page_content": "use std::ffi::OsStr;\nuse std::io::{self, Error, ErrorKind, Result};\nuse std::iter::once;\nuse std::os::windows::ffi::OsStrExt;\nuse std::sync::mpsc::TryRecvError;\nuse std::sync::Arc;\n\nuse crate::event::{OnResize, WindowSize};\nuse crate::tty::windows::child::ChildExitWatcher;\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options, Shell};\n\nmod blocking;\nmod child;\nmod conpty;\n\nuse blocking::{UnblockedReader, UnblockedWriter};\nuse conpty::Conpty as Backend;\nuse miow::pipe::{AnonRead, AnonWrite};\nuse polling::{Event, Poller};\n\npub const PTY_CHILD_EVENT_TOKEN: usize = 1;\npub const PTY_READ_WRITE_TOKEN: usize = 2;\n\ntype ReadPipe = UnblockedReader;\ntype WritePipe = UnblockedWriter;\n\n", "metadata": {"chunk_id": "doc_61_chunk_0", "original_index": 0, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_0"}, "type": "Document"} +{"page_content": "pub struct Pty {\n // XXX: Backend is required to be the first field, to ensure correct drop order. Dropping\n // `conout` before `backend` will cause a deadlock (with Conpty).\n backend: Backend,\n conout: ReadPipe,\n conin: WritePipe,\n child_watcher: ChildExitWatcher,\n}\n\npub fn new(config: &Options, window_size: WindowSize, _window_id: u64) -> Result {\n conpty::new(config, window_size)\n .ok_or_else(|| Error::new(ErrorKind::Other, \"failed to spawn conpty\"))\n}\n\n", "metadata": {"chunk_id": "doc_61_chunk_1", "original_index": 1, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_1"}, "type": "Document"} +{"page_content": "impl Pty {\n fn new(\n backend: impl Into,\n conout: impl Into,\n conin: impl Into,\n child_watcher: ChildExitWatcher,\n ) -> Self {\n Self { backend: backend.into(), conout: conout.into(), conin: conin.into(), child_watcher }\n }\n\n pub fn child_watcher(&self) -> &ChildExitWatcher {\n &self.child_watcher\n }\n}\n\nfn with_key(mut event: Event, key: usize) -> Event {\n event.key = key;\n event\n}\n\nimpl EventedReadWrite for Pty {\n type Reader = ReadPipe;\n type Writer = WritePipe;\n\n", "metadata": {"chunk_id": "doc_61_chunk_2", "original_index": 2, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_2"}, "type": "Document"} +{"page_content": " #[inline]\n unsafe fn register(\n &mut self,\n poll: &Arc,\n interest: polling::Event,\n poll_opts: polling::PollMode,\n ) -> io::Result<()> {\n self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n", "metadata": {"chunk_id": "doc_61_chunk_3", "original_index": 3, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_3"}, "type": "Document"} +{"page_content": " Ok(())\n }\n\n #[inline]\n fn reregister(\n &mut self,\n poll: &Arc,\n interest: polling::Event,\n poll_opts: polling::PollMode,\n ) -> io::Result<()> {\n self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n", "metadata": {"chunk_id": "doc_61_chunk_4", "original_index": 4, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_4"}, "type": "Document"} 
+{"page_content": " Ok(())\n }\n\n #[inline]\n fn deregister(&mut self, _poll: &Arc) -> io::Result<()> {\n self.conin.deregister();\n self.conout.deregister();\n self.child_watcher.deregister();\n\n Ok(())\n }\n\n #[inline]\n fn reader(&mut self) -> &mut Self::Reader {\n &mut self.conout\n }\n\n #[inline]\n fn writer(&mut self) -> &mut Self::Writer {\n &mut self.conin\n }\n}\n\n", "metadata": {"chunk_id": "doc_61_chunk_5", "original_index": 5, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_5"}, "type": "Document"} +{"page_content": "impl EventedPty for Pty {\n fn next_child_event(&mut self) -> Option {\n match self.child_watcher.event_rx().try_recv() {\n Ok(ev) => Some(ev),\n Err(TryRecvError::Empty) => None,\n Err(TryRecvError::Disconnected) => Some(ChildEvent::Exited(None)),\n }\n }\n}\n\nimpl OnResize for Pty {\n fn on_resize(&mut self, window_size: WindowSize) {\n self.backend.on_resize(window_size)\n }\n}\n\nfn cmdline(config: &Options) -> String {\n let default_shell = Shell::new(\"powershell\".to_owned(), Vec::new());\n let shell = config.shell.as_ref().unwrap_or(&default_shell);\n\n", "metadata": {"chunk_id": "doc_61_chunk_6", "original_index": 6, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_6"}, "type": "Document"} +{"page_content": " once(shell.program.as_str())\n .chain(shell.args.iter().map(|s| s.as_str()))\n .collect::>()\n .join(\" \")\n}\n\n/// Converts the string slice into a Windows-standard representation for \"W\"-\n/// suffixed function variants, which accept UTF-16 encoded string values.\npub fn win32_string + ?Sized>(value: &S) -> Vec {\n OsStr::new(value).encode_wide().chain(once(0)).collect()\n}\n", "metadata": {"chunk_id": "doc_61_chunk_7", "original_index": 7, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_7"}, "type": "Document"} +{"page_content": "//! 
Serde helpers.\n\nuse toml::{Table, Value};\n\n/// Merge two serde structures.\n///\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n/// `replacement`.\npub fn merge(base: Value, replacement: Value) -> Value {\n match (base, replacement) {\n (Value::Array(mut base), Value::Array(mut replacement)) => {\n base.append(&mut replacement);\n Value::Array(base)\n },\n (Value::Table(base), Value::Table(replacement)) => {\n Value::Table(merge_tables(base, replacement))\n },\n (_, value) => value,\n }\n}\n\n", "metadata": {"chunk_id": "doc_62_chunk_0", "original_index": 0, "pid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_0"}, "type": "Document"} +{"page_content": "/// Merge two key/value tables.\nfn merge_tables(mut base: Table, replacement: Table) -> Table {\n for (key, value) in replacement {\n let value = match base.remove(&key) {\n Some(base_value) => merge(base_value, value),\n None => value,\n };\n base.insert(key, value);\n }\n\n base\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn merge_primitive() {\n let base = Value::Table(Table::new());\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n", "metadata": {"chunk_id": "doc_62_chunk_1", "original_index": 1, "pid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_1"}, "type": "Document"} +{"page_content": " let base = Value::Boolean(false);\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Integer(0.into());\n let replacement = Value::Integer(1.into());\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::String(String::new());\n let replacement = Value::String(String::from(\"test\"));\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Table(Table::new());\n let replacement = Value::Table(Table::new());\n assert_eq!(merge(base.clone(), replacement), base);\n }\n\n", "metadata": {"chunk_id": "doc_62_chunk_2", "original_index": 2, "pid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_2"}, "type": "Document"} +{"page_content": " #[test]\n fn merge_sequence() {\n let base = Value::Array(vec![Value::Table(Table::new())]);\n let replacement = Value::Array(vec![Value::Boolean(true)]);\n let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);\n assert_eq!(merge(base, replacement), expected);\n }\n\n #[test]\n fn merge_tables() {\n let mut base_table = Table::new();\n base_table.insert(String::from(\"a\"), Value::Boolean(true));\n base_table.insert(String::from(\"b\"), Value::Boolean(false));\n let base = Value::Table(base_table);\n\n let mut replacement_table = Table::new();\n replacement_table.insert(String::from(\"a\"), Value::Boolean(true));\n replacement_table.insert(String::from(\"c\"), Value::Boolean(false));\n let replacement = Value::Table(replacement_table);\n\n let merged = merge(base, replacement);\n\n", "metadata": {"chunk_id": "doc_62_chunk_3", "original_index": 3, "pid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_3"}, "type": "Document"} +{"page_content": " let mut expected_table = Table::new();\n expected_table.insert(String::from(\"b\"), Value::Boolean(false));\n expected_table.insert(String::from(\"a\"), Value::Boolean(true));\n expected_table.insert(String::from(\"c\"), Value::Boolean(false));\n let expected = Value::Table(expected_table);\n\n assert_eq!(merged, 
expected);\n }\n}\n", "metadata": {"chunk_id": "doc_62_chunk_4", "original_index": 4, "pid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_4"}, "type": "Document"} +{"page_content": "//! TTY related functionality.\n\nuse std::ffi::CStr;\nuse std::fs::File;\nuse std::io::{Error, ErrorKind, Read, Result};\nuse std::mem::MaybeUninit;\nuse std::os::unix::io::{AsRawFd, FromRawFd};\nuse std::os::unix::net::UnixStream;\nuse std::os::unix::process::CommandExt;\nuse std::process::{Child, Command, Stdio};\nuse std::sync::Arc;\nuse std::{env, ptr};\n\nuse libc::{c_int, TIOCSCTTY};\nuse log::error;\nuse polling::{Event, PollMode, Poller};\nuse rustix_openpty::openpty;\nuse rustix_openpty::rustix::termios::Winsize;\n#[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\nuse rustix_openpty::rustix::termios::{self, InputModes, OptionalActions};\nuse signal_hook::consts as sigconsts;\nuse signal_hook::low_level::pipe as signal_pipe;\n\n", "metadata": {"chunk_id": "doc_63_chunk_0", "original_index": 0, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_0"}, "type": "Document"} +{"page_content": "use crate::event::{OnResize, WindowSize};\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options};\n\n// Interest in PTY read/writes.\npub(crate) const PTY_READ_WRITE_TOKEN: usize = 0;\n\n// Interest in new child events.\npub(crate) const PTY_CHILD_EVENT_TOKEN: usize = 1;\n\nmacro_rules! die {\n ($($arg:tt)*) => {{\n error!($($arg)*);\n std::process::exit(1);\n }}\n}\n\n/// Really only needed on BSD, but should be fine elsewhere.\nfn set_controlling_terminal(fd: c_int) {\n let res = unsafe {\n // TIOSCTTY changes based on platform and the `ioctl` call is different\n // based on architecture (32/64). So a generic cast is used to make sure\n // there are no issues. 
To allow such a generic cast the clippy warning\n // is disabled.\n #[allow(clippy::cast_lossless)]\n libc::ioctl(fd, TIOCSCTTY as _, 0)\n };\n\n", "metadata": {"chunk_id": "doc_63_chunk_1", "original_index": 1, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_1"}, "type": "Document"}
+{"page_content": " if res < 0 {\n die!(\"ioctl TIOCSCTTY failed: {}\", Error::last_os_error());\n }\n}\n\n#[derive(Debug)]\nstruct Passwd<'a> {\n name: &'a str,\n dir: &'a str,\n shell: &'a str,\n}\n\n/// Return a Passwd struct with pointers into the provided buf.\n///\n/// # Unsafety\n///\n/// If `buf` is changed while `Passwd` is alive, bad thing will almost certainly happen.\nfn get_pw_entry(buf: &mut [i8; 1024]) -> Result<Passwd<'_>> {\n // Create zeroed passwd struct.\n let mut entry: MaybeUninit<libc::passwd> = MaybeUninit::uninit();\n\n let mut res: *mut libc::passwd = ptr::null_mut();\n\n // Try and read the pw file.\n let uid = unsafe { libc::getuid() };\n let status = unsafe {\n libc::getpwuid_r(uid, entry.as_mut_ptr(), buf.as_mut_ptr() as *mut _, buf.len(), &mut res)\n };\n let entry = unsafe { entry.assume_init() };\n\n", "metadata": {"chunk_id": "doc_63_chunk_2", "original_index": 2, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_2"}, "type": "Document"}
+{"page_content": " if status < 0 {\n return Err(Error::new(ErrorKind::Other, \"getpwuid_r failed\"));\n }\n\n if res.is_null() {\n return Err(Error::new(ErrorKind::Other, \"pw not found\"));\n }\n\n // Sanity check.\n assert_eq!(entry.pw_uid, uid);\n\n // Build a borrowed Passwd struct.\n Ok(Passwd {\n name: unsafe { CStr::from_ptr(entry.pw_name).to_str().unwrap() },\n dir: unsafe { CStr::from_ptr(entry.pw_dir).to_str().unwrap() },\n shell: unsafe { CStr::from_ptr(entry.pw_shell).to_str().unwrap() },\n })\n}\n\n", "metadata": {"chunk_id": "doc_63_chunk_3", "original_index": 3, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_3"}, "type": "Document"}
+{"page_content": "pub struct Pty {\n child: Child,\n file: File,\n signals: UnixStream,\n}\n\nimpl Pty {\n pub fn child(&self) -> &Child {\n &self.child\n }\n\n pub fn file(&self) -> &File {\n &self.file\n }\n}\n\n/// User information that is required for a new shell session.\nstruct ShellUser {\n user: String,\n home: String,\n shell: String,\n}\n\nimpl ShellUser {\n /// look for shell, username, longname, and home dir in the respective environment variables\n /// before falling back on looking in to `passwd`.\n fn from_env() -> Result<Self> {\n let mut buf = [0; 1024];\n let pw = get_pw_entry(&mut buf);\n\n", "metadata": {"chunk_id": "doc_63_chunk_4", "original_index": 4, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_4"}, "type": "Document"}
+{"page_content": " let user = match env::var(\"USER\") {\n Ok(user) => user,\n Err(_) => match pw {\n Ok(ref pw) => pw.name.to_owned(),\n Err(err) => return Err(err),\n },\n };\n\n let home = match env::var(\"HOME\") {\n Ok(home) => home,\n Err(_) => match pw {\n Ok(ref pw) => pw.dir.to_owned(),\n Err(err) => return Err(err),\n },\n };\n\n let shell = match env::var(\"SHELL\") {\n Ok(shell) => shell,\n Err(_) => match pw {\n Ok(ref pw) => pw.shell.to_owned(),\n Err(err) => return Err(err),\n },\n };\n\n Ok(Self { user, home, shell })\n }\n}\n\n#[cfg(not(target_os = \"macos\"))]\nfn default_shell_command(shell: &str, _user: &str) -> Command {\n Command::new(shell)\n}\n\n", "metadata": {"chunk_id": "doc_63_chunk_5", "original_index": 5, "pid": 
"28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_5"}, "type": "Document"} +{"page_content": "#[cfg(target_os = \"macos\")]\nfn default_shell_command(shell: &str, user: &str) -> Command {\n let shell_name = shell.rsplit('/').next().unwrap();\n\n // On macOS, use the `login` command so the shell will appear as a tty session.\n let mut login_command = Command::new(\"/usr/bin/login\");\n\n // Exec the shell with argv[0] prepended by '-' so it becomes a login shell.\n // `login` normally does this itself, but `-l` disables this.\n let exec = format!(\"exec -a -{} {}\", shell_name, shell);\n\n", "metadata": {"chunk_id": "doc_63_chunk_6", "original_index": 6, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_6"}, "type": "Document"} +{"page_content": " // -f: Bypasses authentication for the already-logged-in user.\n // -l: Skips changing directory to $HOME and prepending '-' to argv[0].\n // -p: Preserves the environment.\n //\n // XXX: we use zsh here over sh due to `exec -a`.\n login_command.args([\"-flp\", user, \"/bin/zsh\", \"-c\", &exec]);\n login_command\n}\n\n/// Create a new TTY and return a handle to interact with it.\npub fn new(config: &Options, window_size: WindowSize, window_id: u64) -> Result {\n let pty = openpty(None, Some(&window_size.to_winsize()))?;\n let (master, slave) = (pty.controller, pty.user);\n let master_fd = master.as_raw_fd();\n let slave_fd = slave.as_raw_fd();\n\n", "metadata": {"chunk_id": "doc_63_chunk_7", "original_index": 7, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_7"}, "type": "Document"} +{"page_content": " #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n if let Ok(mut termios) = termios::tcgetattr(&master) {\n // Set character encoding to UTF-8.\n termios.input_modes.set(InputModes::IUTF8, true);\n let _ = termios::tcsetattr(&master, OptionalActions::Now, &termios);\n }\n\n let user = ShellUser::from_env()?;\n\n let mut builder = if let Some(shell) = config.shell.as_ref() {\n let mut cmd = Command::new(&shell.program);\n cmd.args(shell.args.as_slice());\n cmd\n } else {\n default_shell_command(&user.shell, &user.user)\n };\n\n", "metadata": {"chunk_id": "doc_63_chunk_8", "original_index": 8, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_8"}, "type": "Document"} +{"page_content": " // Setup child stdin/stdout/stderr as slave fd of PTY.\n // Ownership of fd is transferred to the Stdio structs and will be closed by them at the end of\n // this scope. 
(It is not an issue that the fd is closed three times since File::drop ignores\n // error on libc::close.).\n builder.stdin(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stderr(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stdout(unsafe { Stdio::from_raw_fd(slave_fd) });\n\n", "metadata": {"chunk_id": "doc_63_chunk_9", "original_index": 9, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_9"}, "type": "Document"} +{"page_content": " // Setup shell environment.\n let window_id = window_id.to_string();\n builder.env(\"ALACRITTY_WINDOW_ID\", &window_id);\n builder.env(\"USER\", user.user);\n builder.env(\"HOME\", user.home);\n // Set Window ID for clients relying on X11 hacks.\n builder.env(\"WINDOWID\", window_id);\n for (key, value) in &config.env {\n builder.env(key, value);\n }\n\n unsafe {\n builder.pre_exec(move || {\n // Create a new process group.\n let err = libc::setsid();\n if err == -1 {\n return Err(Error::new(ErrorKind::Other, \"Failed to set session id\"));\n }\n\n set_controlling_terminal(slave_fd);\n\n // No longer need slave/master fds.\n libc::close(slave_fd);\n libc::close(master_fd);\n\n", "metadata": {"chunk_id": "doc_63_chunk_10", "original_index": 10, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_10"}, "type": "Document"} +{"page_content": " libc::signal(libc::SIGCHLD, libc::SIG_DFL);\n libc::signal(libc::SIGHUP, libc::SIG_DFL);\n libc::signal(libc::SIGINT, libc::SIG_DFL);\n libc::signal(libc::SIGQUIT, libc::SIG_DFL);\n libc::signal(libc::SIGTERM, libc::SIG_DFL);\n libc::signal(libc::SIGALRM, libc::SIG_DFL);\n\n Ok(())\n });\n }\n\n // Handle set working directory option.\n if let Some(dir) = &config.working_directory {\n builder.current_dir(dir);\n }\n\n // Prepare signal handling before spawning child.\n let signals = {\n let (sender, recv) = UnixStream::pair()?;\n\n", "metadata": {"chunk_id": "doc_63_chunk_11", "original_index": 11, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_11"}, "type": "Document"} +{"page_content": " // Register the recv end of the pipe for SIGCHLD.\n signal_pipe::register(sigconsts::SIGCHLD, sender)?;\n recv.set_nonblocking(true)?;\n recv\n };\n\n match builder.spawn() {\n Ok(child) => {\n unsafe {\n // Maybe this should be done outside of this function so nonblocking\n // isn't forced upon consumers. 
Although maybe it should be?\n set_nonblocking(master_fd);\n }\n\n Ok(Pty { child, file: File::from(master), signals })\n },\n Err(err) => Err(Error::new(\n err.kind(),\n format!(\n \"Failed to spawn command '{}': {}\",\n builder.get_program().to_string_lossy(),\n err\n ),\n )),\n }\n}\n\n", "metadata": {"chunk_id": "doc_63_chunk_12", "original_index": 12, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_12"}, "type": "Document"}
+{"page_content": "impl Drop for Pty {\n fn drop(&mut self) {\n // Make sure the PTY is terminated properly.\n unsafe {\n libc::kill(self.child.id() as i32, libc::SIGHUP);\n }\n let _ = self.child.wait();\n }\n}\n\nimpl EventedReadWrite for Pty {\n type Reader = File;\n type Writer = File;\n\n #[inline]\n unsafe fn register(\n &mut self,\n poll: &Arc<Poller>,\n mut interest: Event,\n poll_opts: PollMode,\n ) -> Result<()> {\n interest.key = PTY_READ_WRITE_TOKEN;\n unsafe {\n poll.add_with_mode(&self.file, interest, poll_opts)?;\n }\n\n", "metadata": {"chunk_id": "doc_63_chunk_13", "original_index": 13, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_13"}, "type": "Document"}
+{"page_content": " unsafe {\n poll.add_with_mode(\n &self.signals,\n Event::readable(PTY_CHILD_EVENT_TOKEN),\n PollMode::Level,\n )\n }\n }\n\n #[inline]\n fn reregister(\n &mut self,\n poll: &Arc<Poller>,\n mut interest: Event,\n poll_opts: PollMode,\n ) -> Result<()> {\n interest.key = PTY_READ_WRITE_TOKEN;\n poll.modify_with_mode(&self.file, interest, poll_opts)?;\n\n", "metadata": {"chunk_id": "doc_63_chunk_14", "original_index": 14, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_14"}, "type": "Document"}
+{"page_content": " poll.modify_with_mode(\n &self.signals,\n Event::readable(PTY_CHILD_EVENT_TOKEN),\n PollMode::Level,\n )\n }\n\n #[inline]\n fn deregister(&mut self, poll: &Arc<Poller>) -> Result<()> {\n poll.delete(&self.file)?;\n poll.delete(&self.signals)\n }\n\n #[inline]\n fn reader(&mut self) -> &mut File {\n &mut self.file\n }\n\n #[inline]\n fn writer(&mut self) -> &mut File {\n &mut self.file\n }\n}\n\n", "metadata": {"chunk_id": "doc_63_chunk_15", "original_index": 15, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_15"}, "type": "Document"}
+{"page_content": "impl EventedPty for Pty {\n #[inline]\n fn next_child_event(&mut self) -> Option<ChildEvent> {\n // See if there has been a SIGCHLD.\n let mut buf = [0u8; 1];\n if let Err(err) = self.signals.read(&mut buf) {\n if err.kind() != ErrorKind::WouldBlock {\n error!(\"Error reading from signal pipe: {}\", err);\n }\n return None;\n }\n\n // Match on the child process.\n match self.child.try_wait() {\n Err(err) => {\n error!(\"Error checking child process termination: {}\", err);\n None\n },\n Ok(None) => None,\n Ok(exit_status) => Some(ChildEvent::Exited(exit_status.and_then(|s| s.code()))),\n }\n }\n}\n\n", "metadata": {"chunk_id": "doc_63_chunk_16", "original_index": 16, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_16"}, "type": "Document"}
+{"page_content": "impl OnResize for Pty {\n /// Resize the PTY.\n ///\n /// Tells the kernel that the window size changed with the new pixel\n /// dimensions and line/column counts.\n fn on_resize(&mut self, window_size: WindowSize) {\n let win = window_size.to_winsize();\n\n let res = unsafe { libc::ioctl(self.file.as_raw_fd(), libc::TIOCSWINSZ, &win as *const _) };\n\n if res < 0 {\n die!(\"ioctl TIOCSWINSZ failed: {}\", Error::last_os_error());\n }\n }\n}\n\n", "metadata": {"chunk_id": 
"doc_63_chunk_17", "original_index": 17, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_17"}, "type": "Document"} +{"page_content": "/// Types that can produce a `Winsize`.\npub trait ToWinsize {\n /// Get a `Winsize`.\n fn to_winsize(self) -> Winsize;\n}\n\nimpl ToWinsize for WindowSize {\n fn to_winsize(self) -> Winsize {\n let ws_row = self.num_lines as libc::c_ushort;\n let ws_col = self.num_cols as libc::c_ushort;\n\n let ws_xpixel = ws_col * self.cell_width as libc::c_ushort;\n let ws_ypixel = ws_row * self.cell_height as libc::c_ushort;\n Winsize { ws_row, ws_col, ws_xpixel, ws_ypixel }\n }\n}\n\nunsafe fn set_nonblocking(fd: c_int) {\n use libc::{fcntl, F_GETFL, F_SETFL, O_NONBLOCK};\n\n let res = fcntl(fd, F_SETFL, fcntl(fd, F_GETFL, 0) | O_NONBLOCK);\n assert_eq!(res, 0);\n}\n\n#[test]\nfn test_get_pw_entry() {\n let mut buf: [i8; 1024] = [0; 1024];\n let _pw = get_pw_entry(&mut buf).unwrap();\n}\n", "metadata": {"chunk_id": "doc_63_chunk_18", "original_index": 18, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_18"}, "type": "Document"} +{"page_content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize(deserializer: D) -> Result\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "metadata": {"chunk_id": "doc_64_chunk_0", "original_index": 0, "pid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7_0"}, "type": "Document"} +{"page_content": "use log::{debug, warn};\nuse raw_window_handle::RawDisplayHandle;\n\nuse alacritty_terminal::term::ClipboardType;\n\n#[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\nuse copypasta::nop_clipboard::NopClipboardContext;\n#[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::wayland_clipboard;\n#[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::x11_clipboard::{Primary as X11SelectionClipboard, X11ClipboardContext};\n#[cfg(any(feature = \"x11\", target_os = \"macos\", windows))]\nuse copypasta::ClipboardContext;\nuse copypasta::ClipboardProvider;\n\n", "metadata": {"chunk_id": "doc_65_chunk_0", "original_index": 0, "pid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_0"}, "type": "Document"} +{"page_content": "pub struct Clipboard {\n clipboard: Box,\n selection: Option>,\n}\n\nimpl Clipboard {\n pub unsafe fn new(display: RawDisplayHandle) -> Self {\n match display {\n #[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\n RawDisplayHandle::Wayland(display) => {\n let (selection, clipboard) =\n wayland_clipboard::create_clipboards_from_external(display.display);\n Self { clipboard: Box::new(clipboard), selection: Some(Box::new(selection)) }\n },\n _ => Self::default(),\n }\n }\n\n", "metadata": {"chunk_id": "doc_65_chunk_1", "original_index": 1, "pid": 
"dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_1"}, "type": "Document"} +{"page_content": " /// Used for tests and to handle missing clipboard provider when built without the `x11`\n /// feature.\n #[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\n pub fn new_nop() -> Self {\n Self { clipboard: Box::new(NopClipboardContext::new().unwrap()), selection: None }\n }\n}\n\nimpl Default for Clipboard {\n fn default() -> Self {\n #[cfg(any(target_os = \"macos\", windows))]\n return Self { clipboard: Box::new(ClipboardContext::new().unwrap()), selection: None };\n\n", "metadata": {"chunk_id": "doc_65_chunk_2", "original_index": 2, "pid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_2"}, "type": "Document"} +{"page_content": " #[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\n return Self {\n clipboard: Box::new(ClipboardContext::new().unwrap()),\n selection: Some(Box::new(X11ClipboardContext::::new().unwrap())),\n };\n\n #[cfg(not(any(feature = \"x11\", target_os = \"macos\", windows)))]\n return Self::new_nop();\n }\n}\n\nimpl Clipboard {\n pub fn store(&mut self, ty: ClipboardType, text: impl Into) {\n let clipboard = match (ty, &mut self.selection) {\n (ClipboardType::Selection, Some(provider)) => provider,\n (ClipboardType::Selection, None) => return,\n _ => &mut self.clipboard,\n };\n\n", "metadata": {"chunk_id": "doc_65_chunk_3", "original_index": 3, "pid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_3"}, "type": "Document"} +{"page_content": " clipboard.set_contents(text.into()).unwrap_or_else(|err| {\n warn!(\"Unable to store text in clipboard: {}\", err);\n });\n }\n\n pub fn load(&mut self, ty: ClipboardType) -> String {\n let clipboard = match (ty, &mut self.selection) {\n (ClipboardType::Selection, Some(provider)) => provider,\n _ => &mut self.clipboard,\n };\n\n match clipboard.get_contents() {\n Err(err) => {\n debug!(\"Unable to load text from clipboard: {}\", err);\n String::new()\n },\n Ok(text) => text,\n }\n }\n}\n", "metadata": {"chunk_id": "doc_65_chunk_4", "original_index": 4, "pid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_4"}, "type": "Document"} +{"page_content": "//! 
Scheduler for emitting events at a specific time in the future.\n\nuse std::collections::VecDeque;\nuse std::time::{Duration, Instant};\n\nuse winit::event_loop::EventLoopProxy;\nuse winit::window::WindowId;\n\nuse crate::event::Event;\n\n/// ID uniquely identifying a timer.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub struct TimerId {\n topic: Topic,\n window_id: WindowId,\n}\n\nimpl TimerId {\n pub fn new(topic: Topic, window_id: WindowId) -> Self {\n Self { topic, window_id }\n }\n}\n\n/// Available timer topics.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub enum Topic {\n SelectionScrolling,\n DelayedSearch,\n BlinkCursor,\n BlinkTimeout,\n Frame,\n}\n\n/// Event scheduled to be emitted at a specific time.\npub struct Timer {\n pub deadline: Instant,\n pub event: Event,\n pub id: TimerId,\n\n", "metadata": {"chunk_id": "doc_66_chunk_0", "original_index": 0, "pid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_0"}, "type": "Document"}
+{"page_content": " interval: Option<Duration>,\n}\n\n/// Scheduler tracking all pending timers.\npub struct Scheduler {\n timers: VecDeque<Timer>,\n event_proxy: EventLoopProxy<Event>,\n}\n\nimpl Scheduler {\n pub fn new(event_proxy: EventLoopProxy<Event>) -> Self {\n Self { timers: VecDeque::new(), event_proxy }\n }\n\n /// Process all pending timers.\n ///\n /// If there are still timers pending after all ready events have been processed, the closest\n /// pending deadline will be returned.\n pub fn update(&mut self) -> Option<Instant> {\n let now = Instant::now();\n\n", "metadata": {"chunk_id": "doc_66_chunk_1", "original_index": 1, "pid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_1"}, "type": "Document"}
+{"page_content": " while !self.timers.is_empty() && self.timers[0].deadline <= now {\n if let Some(timer) = self.timers.pop_front() {\n // Automatically repeat the event.\n if let Some(interval) = timer.interval {\n self.schedule(timer.event.clone(), interval, true, timer.id);\n }\n\n let _ = self.event_proxy.send_event(timer.event);\n }\n }\n\n self.timers.front().map(|timer| timer.deadline)\n }\n\n /// Schedule a new event.\n pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n let deadline = Instant::now() + interval;\n\n", "metadata": {"chunk_id": "doc_66_chunk_2", "original_index": 2, "pid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_2"}, "type": "Document"}
+{"page_content": " // Get insert position in the schedule.\n let index = self\n .timers\n .iter()\n .position(|timer| timer.deadline > deadline)\n .unwrap_or(self.timers.len());\n\n // Set the automatic event repeat rate.\n let interval = if repeat { Some(interval) } else { None };\n\n self.timers.insert(index, Timer { interval, deadline, event, id: timer_id });\n }\n\n", "metadata": {"chunk_id": "doc_66_chunk_3", "original_index": 3, "pid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_3"}, "type": "Document"}
+{"page_content": " /// Cancel a scheduled event.\n pub fn unschedule(&mut self, id: TimerId) -> Option<Timer> {\n let index = self.timers.iter().position(|timer| timer.id == id)?;\n self.timers.remove(index)\n }\n\n /// Check if a timer is already scheduled.\n pub fn scheduled(&mut self, id: TimerId) -> bool {\n self.timers.iter().any(|timer| timer.id == id)\n }\n\n /// Remove all timers scheduled for a window.\n ///\n /// This must be called when a window is removed to ensure that timers on intervals do not\n /// stick around forever and cause a memory leak.\n pub fn 
unschedule_window(&mut self, window_id: WindowId) {\n self.timers.retain(|timer| timer.id.window_id != window_id);\n }\n}\n", "metadata": {"chunk_id": "doc_66_chunk_4", "original_index": 4, "pid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_4"}, "type": "Document"}
+{"page_content": "use std::io::Write;\nuse std::{io, panic};\n\nuse windows_sys::Win32::UI::WindowsAndMessaging::{\n MessageBoxW, MB_ICONERROR, MB_OK, MB_SETFOREGROUND, MB_TASKMODAL,\n};\n\nuse alacritty_terminal::tty::windows::win32_string;\n\n// Install a panic handler that renders the panic in a classical Windows error\n// dialog box as well as writes the panic to STDERR.\npub fn attach_handler() {\n panic::set_hook(Box::new(|panic_info| {\n let _ = writeln!(io::stderr(), \"{}\", panic_info);\n let msg = format!(\"{}\\n\\nPress Ctrl-C to Copy\", panic_info);\n unsafe {\n MessageBoxW(\n 0isize,\n win32_string(&msg).as_ptr(),\n win32_string(\"Alacritty: Runtime Error\").as_ptr(),\n MB_ICONERROR | MB_OK | MB_SETFOREGROUND | MB_TASKMODAL,\n );\n }\n }));\n}\n", "metadata": {"chunk_id": "doc_67_chunk_0", "original_index": 0, "pid": "6e284600be25c8833b866ef0ebfab953a8d0a0f8420cfe56fa17e28664de1b82_0"}, "type": "Document"}
+{"page_content": "//! Defines the Row type which makes up lines in the grid.\n\nuse std::cmp::{max, min};\nuse std::ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo, RangeToInclusive};\nuse std::{ptr, slice};\n\n#[cfg(feature = \"serde\")]\nuse serde::{Deserialize, Serialize};\n\nuse crate::grid::GridCell;\nuse crate::index::Column;\nuse crate::term::cell::ResetDiscriminant;\n\n/// A row in the grid.\n#[derive(Default, Clone, Debug)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub struct Row<T> {\n inner: Vec<T>,\n\n", "metadata": {"chunk_id": "doc_68_chunk_0", "original_index": 0, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_0"}, "type": "Document"}
+{"page_content": " /// Maximum number of occupied entries.\n ///\n /// This is the upper bound on the number of elements in the row, which have been modified\n /// since the last reset. 
All cells after this point are guaranteed to be equal.\n pub(crate) occ: usize,\n}\n\nimpl<T: PartialEq> PartialEq for Row<T> {\n fn eq(&self, other: &Self) -> bool {\n self.inner == other.inner\n }\n}\n\nimpl<T: Clone + Default> Row<T> {\n /// Create a new terminal row.\n ///\n /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.\n pub fn new(columns: usize) -> Row<T> {\n debug_assert!(columns >= 1);\n\n", "metadata": {"chunk_id": "doc_68_chunk_1", "original_index": 1, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_1"}, "type": "Document"}
+{"page_content": " let mut inner: Vec<T> = Vec::with_capacity(columns);\n\n // This is a slightly optimized version of `std::vec::Vec::resize`.\n unsafe {\n let mut ptr = inner.as_mut_ptr();\n\n for _ in 1..columns {\n ptr::write(ptr, T::default());\n ptr = ptr.offset(1);\n }\n ptr::write(ptr, T::default());\n\n inner.set_len(columns);\n }\n\n Row { inner, occ: 0 }\n }\n\n", "metadata": {"chunk_id": "doc_68_chunk_2", "original_index": 2, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_2"}, "type": "Document"}
+{"page_content": " /// Increase the number of columns in the row.\n #[inline]\n pub fn grow(&mut self, columns: usize) {\n if self.inner.len() >= columns {\n return;\n }\n\n self.inner.resize_with(columns, T::default);\n }\n\n /// Reduce the number of columns in the row.\n ///\n /// This will return all non-empty cells that were removed.\n pub fn shrink(&mut self, columns: usize) -> Option<Vec<T>>\n where\n T: GridCell,\n {\n if self.inner.len() <= columns {\n return None;\n }\n\n // Split off cells for a new row.\n let mut new_row = self.inner.split_off(columns);\n let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n new_row.truncate(index);\n\n self.occ = min(self.occ, columns);\n\n if new_row.is_empty() {\n None\n } else {\n Some(new_row)\n }\n }\n\n", "metadata": {"chunk_id": "doc_68_chunk_3", "original_index": 3, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_3"}, "type": "Document"}
+{"page_content": " /// Reset all cells in the row to the `template` cell.\n #[inline]\n pub fn reset<D>(&mut self, template: &T)\n where\n T: ResetDiscriminant<D> + GridCell,\n D: PartialEq,\n {\n debug_assert!(!self.inner.is_empty());\n\n // Mark all cells as dirty if template cell changed.\n let len = self.inner.len();\n if self.inner[len - 1].discriminant() != template.discriminant() {\n self.occ = len;\n }\n\n // Reset every dirty cell in the row.\n for item in &mut self.inner[0..self.occ] {\n item.reset(template);\n }\n\n self.occ = 0;\n }\n}\n\n#[allow(clippy::len_without_is_empty)]\nimpl<T> Row<T> {\n #[inline]\n pub fn from_vec(vec: Vec<T>, occ: usize) -> Row<T> {\n Row { inner: vec, occ }\n }\n\n", "metadata": {"chunk_id": "doc_68_chunk_4", "original_index": 4, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_4"}, "type": "Document"}
+{"page_content": " #[inline]\n pub fn len(&self) -> usize {\n self.inner.len()\n }\n\n #[inline]\n pub fn last(&self) -> Option<&T> {\n self.inner.last()\n }\n\n #[inline]\n pub fn last_mut(&mut self) -> Option<&mut T> {\n self.occ = self.inner.len();\n self.inner.last_mut()\n }\n\n #[inline]\n pub fn append(&mut self, vec: &mut Vec<T>)\n where\n T: GridCell,\n {\n self.occ += vec.len();\n self.inner.append(vec);\n }\n\n #[inline]\n pub fn append_front(&mut self, mut vec: Vec<T>) {\n self.occ += vec.len();\n\n vec.append(&mut self.inner);\n self.inner = vec;\n }\n\n", "metadata": {"chunk_id": "doc_68_chunk_5", "original_index": 5, "pid": 
"c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_5"}, "type": "Document"} +{"page_content": " /// Check if all cells in the row are empty.\n #[inline]\n pub fn is_clear(&self) -> bool\n where\n T: GridCell,\n {\n self.inner.iter().all(GridCell::is_empty)\n }\n\n #[inline]\n pub fn front_split_off(&mut self, at: usize) -> Vec {\n self.occ = self.occ.saturating_sub(at);\n\n let mut split = self.inner.split_off(at);\n std::mem::swap(&mut split, &mut self.inner);\n split\n }\n}\n\n", "metadata": {"chunk_id": "doc_68_chunk_6", "original_index": 6, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_6"}, "type": "Document"} +{"page_content": "impl<'a, T> IntoIterator for &'a Row {\n type IntoIter = slice::Iter<'a, T>;\n type Item = &'a T;\n\n #[inline]\n fn into_iter(self) -> slice::Iter<'a, T> {\n self.inner.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Row {\n type IntoIter = slice::IterMut<'a, T>;\n type Item = &'a mut T;\n\n #[inline]\n fn into_iter(self) -> slice::IterMut<'a, T> {\n self.occ = self.len();\n self.inner.iter_mut()\n }\n}\n\n", "metadata": {"chunk_id": "doc_68_chunk_7", "original_index": 7, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_7"}, "type": "Document"} +{"page_content": "impl Index for Row {\n type Output = T;\n\n #[inline]\n fn index(&self, index: Column) -> &T {\n &self.inner[index.0]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, index: Column) -> &mut T {\n self.occ = max(self.occ, *index + 1);\n &mut self.inner[index.0]\n }\n}\n\nimpl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: Range) -> &[T] {\n &self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: Range) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\n", "metadata": {"chunk_id": "doc_68_chunk_8", "original_index": 8, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_8"}, "type": "Document"} +{"page_content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeTo) -> &[T] {\n &self.inner[..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeTo) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[..(index.end.0)]\n }\n}\n\n", "metadata": {"chunk_id": "doc_68_chunk_9", "original_index": 9, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_9"}, "type": "Document"} +{"page_content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeFrom) -> &[T] {\n &self.inner[(index.start.0)..]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeFrom) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[(index.start.0)..]\n }\n}\n\nimpl Index for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, _: RangeFull) -> &[T] {\n &self.inner[..]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, _: RangeFull) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[..]\n }\n}\n\n", "metadata": {"chunk_id": "doc_68_chunk_10", "original_index": 10, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_10"}, "type": "Document"} +{"page_content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeToInclusive) -> &[T] {\n &self.inner[..=(index.end.0)]\n }\n}\n\nimpl IndexMut> 
for Row<T> {\n #[inline]\n fn index_mut(&mut self, index: RangeToInclusive<Column>) -> &mut [T] {\n self.occ = max(self.occ, *index.end + 1);\n &mut self.inner[..=(index.end.0)]\n }\n}\n", "metadata": {"chunk_id": "doc_68_chunk_11", "original_index": 11, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_11"}, "type": "Document"}
+{"page_content": "use log::LevelFilter;\n\nuse alacritty_config_derive::ConfigDeserialize;\n\n/// Debugging options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub struct Debug {\n pub log_level: LevelFilter,\n\n pub print_events: bool,\n\n /// Keep the log file after quitting.\n pub persistent_logging: bool,\n\n /// Should show render timer.\n pub render_timer: bool,\n\n /// Highlight damage information produced by alacritty.\n pub highlight_damage: bool,\n\n /// The renderer alacritty should be using.\n pub renderer: Option<RendererPreference>,\n\n /// Use EGL as display API if the current platform allows it.\n pub prefer_egl: bool,\n\n /// Record ref test.\n #[config(skip)]\n pub ref_test: bool,\n}\n\n", "metadata": {"chunk_id": "doc_69_chunk_0", "original_index": 0, "pid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293_0"}, "type": "Document"}
+{"page_content": "impl Default for Debug {\n fn default() -> Self {\n Self {\n log_level: LevelFilter::Warn,\n print_events: Default::default(),\n persistent_logging: Default::default(),\n render_timer: Default::default(),\n highlight_damage: Default::default(),\n ref_test: Default::default(),\n renderer: Default::default(),\n prefer_egl: Default::default(),\n }\n }\n}\n\n/// The renderer configuration options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub enum RendererPreference {\n /// OpenGL 3.3 renderer.\n Glsl3,\n\n /// GLES 2 renderer, with optional extensions like dual source blending.\n Gles2,\n\n /// Pure GLES 2 renderer.\n Gles2Pure,\n}\n", "metadata": {"chunk_id": "doc_69_chunk_1", "original_index": 1, "pid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293_1"}, "type": "Document"}
+{"page_content": "//! The display subsystem including window management, font rasterization, and\n//! 
GPU drawing.\n\nuse std::cmp;\nuse std::fmt::{self, Formatter};\nuse std::mem::{self, ManuallyDrop};\nuse std::num::NonZeroU32;\nuse std::ops::{Deref, DerefMut};\nuse std::time::{Duration, Instant};\n\nuse glutin::context::{NotCurrentContext, PossiblyCurrentContext};\nuse glutin::prelude::*;\nuse glutin::surface::{Surface, SwapInterval, WindowSurface};\n\nuse log::{debug, info};\nuse parking_lot::MutexGuard;\nuse raw_window_handle::RawWindowHandle;\nuse serde::{Deserialize, Serialize};\nuse winit::dpi::PhysicalSize;\nuse winit::keyboard::ModifiersState;\nuse winit::window::CursorIcon;\n\nuse crossfont::{Rasterize, Rasterizer, Size as FontSize};\nuse unicode_width::UnicodeWidthChar;\n\n", "metadata": {"chunk_id": "doc_70_chunk_0", "original_index": 0, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_0"}, "type": "Document"} +{"page_content": "use alacritty_terminal::event::{EventListener, OnResize, WindowSize};\nuse alacritty_terminal::grid::Dimensions as TermDimensions;\nuse alacritty_terminal::index::{Column, Direction, Line, Point};\nuse alacritty_terminal::selection::Selection;\nuse alacritty_terminal::term::cell::Flags;\nuse alacritty_terminal::term::{\n self, point_to_viewport, LineDamageBounds, Term, TermDamage, TermMode, MIN_COLUMNS,\n MIN_SCREEN_LINES,\n};\nuse alacritty_terminal::vte::ansi::{CursorShape, NamedColor};\n\n", "metadata": {"chunk_id": "doc_70_chunk_1", "original_index": 1, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_1"}, "type": "Document"} +{"page_content": "use crate::config::font::Font;\nuse crate::config::window::Dimensions;\n#[cfg(not(windows))]\nuse crate::config::window::StartupMode;\nuse crate::config::UiConfig;\nuse crate::display::bell::VisualBell;\nuse crate::display::color::{List, Rgb};\nuse crate::display::content::{RenderableContent, RenderableCursor};\nuse crate::display::cursor::IntoRects;\nuse crate::display::damage::{damage_y_to_viewport_y, DamageTracker};\nuse crate::display::hint::{HintMatch, HintState};\nuse crate::display::meter::Meter;\nuse crate::display::window::Window;\nuse crate::event::{Event, EventType, Mouse, SearchState};\nuse crate::message_bar::{MessageBuffer, MessageType};\nuse crate::renderer::rects::{RenderLine, RenderLines, RenderRect};\nuse crate::renderer::{self, GlyphCache, Renderer};\nuse crate::scheduler::{Scheduler, TimerId, Topic};\nuse crate::string::{ShortenDirection, StrShortener};\n\n", "metadata": {"chunk_id": "doc_70_chunk_2", "original_index": 2, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_2"}, "type": "Document"} +{"page_content": "pub mod color;\npub mod content;\npub mod cursor;\npub mod hint;\npub mod window;\n\nmod bell;\nmod damage;\nmod meter;\n\n/// Label for the forward terminal search bar.\nconst FORWARD_SEARCH_LABEL: &str = \"Search: \";\n\n/// Label for the backward terminal search bar.\nconst BACKWARD_SEARCH_LABEL: &str = \"Backward Search: \";\n\n/// The character used to shorten the visible text like uri preview or search regex.\nconst SHORTENER: char = '…';\n\n/// Color which is used to highlight damaged rects when debugging.\nconst DAMAGE_RECT_COLOR: Rgb = Rgb::new(255, 0, 255);\n\n#[derive(Debug)]\npub enum Error {\n /// Error with window management.\n Window(window::Error),\n\n", "metadata": {"chunk_id": "doc_70_chunk_3", "original_index": 3, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_3"}, "type": "Document"} +{"page_content": " /// Error dealing with fonts.\n Font(crossfont::Error),\n\n 
/// Error in renderer.\n Render(renderer::Error),\n\n /// Error during context operations.\n Context(glutin::error::Error),\n}\n\nimpl std::error::Error for Error {\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n match self {\n Error::Window(err) => err.source(),\n Error::Font(err) => err.source(),\n Error::Render(err) => err.source(),\n Error::Context(err) => err.source(),\n }\n }\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_4", "original_index": 4, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_4"}, "type": "Document"}
+{"page_content": "impl fmt::Display for Error {\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n match self {\n Error::Window(err) => err.fmt(f),\n Error::Font(err) => err.fmt(f),\n Error::Render(err) => err.fmt(f),\n Error::Context(err) => err.fmt(f),\n }\n }\n}\n\nimpl From<window::Error> for Error {\n fn from(val: window::Error) -> Self {\n Error::Window(val)\n }\n}\n\nimpl From<crossfont::Error> for Error {\n fn from(val: crossfont::Error) -> Self {\n Error::Font(val)\n }\n}\n\nimpl From<renderer::Error> for Error {\n fn from(val: renderer::Error) -> Self {\n Error::Render(val)\n }\n}\n\nimpl From<glutin::error::Error> for Error {\n fn from(val: glutin::error::Error) -> Self {\n Error::Context(val)\n }\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_5", "original_index": 5, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_5"}, "type": "Document"}
+{"page_content": "/// Terminal size info.\n#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]\npub struct SizeInfo<T = f32> {\n /// Terminal window width.\n width: T,\n\n /// Terminal window height.\n height: T,\n\n /// Width of individual cell.\n cell_width: T,\n\n /// Height of individual cell.\n cell_height: T,\n\n /// Horizontal window padding.\n padding_x: T,\n\n /// Vertical window padding.\n padding_y: T,\n\n /// Number of lines in the viewport.\n screen_lines: usize,\n\n /// Number of columns in the viewport.\n columns: usize,\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_6", "original_index": 6, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_6"}, "type": "Document"}
+{"page_content": "impl From<SizeInfo<f32>> for SizeInfo<u32> {\n fn from(size_info: SizeInfo<f32>) -> Self {\n Self {\n width: size_info.width as u32,\n height: size_info.height as u32,\n cell_width: size_info.cell_width as u32,\n cell_height: size_info.cell_height as u32,\n padding_x: size_info.padding_x as u32,\n padding_y: size_info.padding_y as u32,\n screen_lines: size_info.screen_lines,\n columns: size_info.screen_lines,\n }\n }\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_7", "original_index": 7, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_7"}, "type": "Document"}
+{"page_content": "impl From<SizeInfo<f32>> for WindowSize {\n fn from(size_info: SizeInfo<f32>) -> Self {\n Self {\n num_cols: size_info.columns() as u16,\n num_lines: size_info.screen_lines() as u16,\n cell_width: size_info.cell_width() as u16,\n cell_height: size_info.cell_height() as u16,\n }\n }\n}\n\nimpl<T: Clone + Copy> SizeInfo<T> {\n #[inline]\n pub fn width(&self) -> T {\n self.width\n }\n\n #[inline]\n pub fn height(&self) -> T {\n self.height\n }\n\n #[inline]\n pub fn cell_width(&self) -> T {\n self.cell_width\n }\n\n #[inline]\n pub fn cell_height(&self) -> T {\n self.cell_height\n }\n\n #[inline]\n pub fn padding_x(&self) -> T {\n self.padding_x\n }\n\n #[inline]\n pub fn padding_y(&self) -> T {\n self.padding_y\n }\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_8", "original_index": 8, "pid": 
"96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_8"}, "type": "Document"} +{"page_content": "impl SizeInfo {\n #[allow(clippy::too_many_arguments)]\n pub fn new(\n width: f32,\n height: f32,\n cell_width: f32,\n cell_height: f32,\n mut padding_x: f32,\n mut padding_y: f32,\n dynamic_padding: bool,\n ) -> SizeInfo {\n if dynamic_padding {\n padding_x = Self::dynamic_padding(padding_x.floor(), width, cell_width);\n padding_y = Self::dynamic_padding(padding_y.floor(), height, cell_height);\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_9", "original_index": 9, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_9"}, "type": "Document"} +{"page_content": " let lines = (height - 2. * padding_y) / cell_height;\n let screen_lines = cmp::max(lines as usize, MIN_SCREEN_LINES);\n\n let columns = (width - 2. * padding_x) / cell_width;\n let columns = cmp::max(columns as usize, MIN_COLUMNS);\n\n SizeInfo {\n width,\n height,\n cell_width,\n cell_height,\n padding_x: padding_x.floor(),\n padding_y: padding_y.floor(),\n screen_lines,\n columns,\n }\n }\n\n #[inline]\n pub fn reserve_lines(&mut self, count: usize) {\n self.screen_lines = cmp::max(self.screen_lines.saturating_sub(count), MIN_SCREEN_LINES);\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_10", "original_index": 10, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_10"}, "type": "Document"} +{"page_content": " /// Check if coordinates are inside the terminal grid.\n ///\n /// The padding, message bar or search are not counted as part of the grid.\n #[inline]\n pub fn contains_point(&self, x: usize, y: usize) -> bool {\n x <= (self.padding_x + self.columns as f32 * self.cell_width) as usize\n && x > self.padding_x as usize\n && y <= (self.padding_y + self.screen_lines as f32 * self.cell_height) as usize\n && y > self.padding_y as usize\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_11", "original_index": 11, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_11"}, "type": "Document"} +{"page_content": " /// Calculate padding to spread it evenly around the terminal content.\n #[inline]\n fn dynamic_padding(padding: f32, dimension: f32, cell_dimension: f32) -> f32 {\n padding + ((dimension - 2. 
* padding) % cell_dimension) / 2.\n }\n}\n\nimpl TermDimensions for SizeInfo {\n #[inline]\n fn columns(&self) -> usize {\n self.columns\n }\n\n #[inline]\n fn screen_lines(&self) -> usize {\n self.screen_lines\n }\n\n #[inline]\n fn total_lines(&self) -> usize {\n self.screen_lines()\n }\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_12", "original_index": 12, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_12"}, "type": "Document"}
+{"page_content": "#[derive(Default, Clone, Debug, PartialEq, Eq)]\npub struct DisplayUpdate {\n pub dirty: bool,\n\n dimensions: Option<PhysicalSize<u32>>,\n cursor_dirty: bool,\n font: Option<Font>,\n}\n\nimpl DisplayUpdate {\n pub fn dimensions(&self) -> Option<PhysicalSize<u32>> {\n self.dimensions\n }\n\n pub fn font(&self) -> Option<&Font> {\n self.font.as_ref()\n }\n\n pub fn cursor_dirty(&self) -> bool {\n self.cursor_dirty\n }\n\n pub fn set_dimensions(&mut self, dimensions: PhysicalSize<u32>) {\n self.dimensions = Some(dimensions);\n self.dirty = true;\n }\n\n pub fn set_font(&mut self, font: Font) {\n self.font = Some(font);\n self.dirty = true;\n }\n\n pub fn set_cursor_dirty(&mut self) {\n self.cursor_dirty = true;\n self.dirty = true;\n }\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_13", "original_index": 13, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_13"}, "type": "Document"}
+{"page_content": "/// The display wraps a window, font rasterizer, and GPU renderer.\npub struct Display {\n pub window: Window,\n\n pub size_info: SizeInfo,\n\n /// Hint highlighted by the mouse.\n pub highlighted_hint: Option<HintMatch>,\n\n /// Hint highlighted by the vi mode cursor.\n pub vi_highlighted_hint: Option<HintMatch>,\n\n pub raw_window_handle: RawWindowHandle,\n\n /// UI cursor visibility for blinking.\n pub cursor_hidden: bool,\n\n pub visual_bell: VisualBell,\n\n /// Mapped RGB values for each terminal color.\n pub colors: List,\n\n /// State of the keyboard hints.\n pub hint_state: HintState,\n\n /// Unprocessed display updates.\n pub pending_update: DisplayUpdate,\n\n /// The renderer update that takes place only once before the actual rendering.\n pub pending_renderer_update: Option<RendererUpdate>,\n\n", "metadata": {"chunk_id": "doc_70_chunk_14", "original_index": 14, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_14"}, "type": "Document"}
+{"page_content": " /// The ime on the given display.\n pub ime: Ime,\n\n /// The state of the timer for frame scheduling.\n pub frame_timer: FrameTimer,\n\n /// Damage tracker for the given display.\n pub damage_tracker: DamageTracker,\n\n /// Font size used by the window.\n pub font_size: FontSize,\n\n // Mouse point position when highlighting hints.\n hint_mouse_point: Option<Point>,\n\n renderer: ManuallyDrop<Renderer>,\n\n surface: ManuallyDrop<Surface<WindowSurface>>,\n\n context: ManuallyDrop<Replaceable<PossiblyCurrentContext>>,\n\n glyph_cache: GlyphCache,\n meter: Meter,\n}\n\nimpl Display {\n pub fn new(\n window: Window,\n gl_context: NotCurrentContext,\n config: &UiConfig,\n _tabbed: bool,\n ) -> Result<Display, Error> {\n let raw_window_handle = window.raw_window_handle();\n\n let scale_factor = window.scale_factor as f32;\n let rasterizer = Rasterizer::new()?;\n\n", "metadata": {"chunk_id": "doc_70_chunk_15", "original_index": 15, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_15"}, "type": "Document"}
+{"page_content": " let font_size = config.font.size().scale(scale_factor);\n debug!(\"Loading \\\"{}\\\" font\", &config.font.normal().family);\n let font = config.font.clone().with_size(font_size);\n let mut glyph_cache = GlyphCache::new(rasterizer, &font)?;\n\n let 
metrics = glyph_cache.font_metrics();\n let (cell_width, cell_height) = compute_cell_size(config, &metrics);\n\n // Resize the window to account for the user configured size.\n if let Some(dimensions) = config.window.dimensions() {\n let size = window_size(config, dimensions, cell_width, cell_height, scale_factor);\n window.request_inner_size(size);\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_16", "original_index": 16, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_16"}, "type": "Document"} +{"page_content": " // Create the GL surface to draw into.\n let surface = renderer::platform::create_gl_surface(\n &gl_context,\n window.inner_size(),\n window.raw_window_handle(),\n )?;\n\n // Make the context current.\n let context = gl_context.make_current(&surface)?;\n\n // Create renderer.\n let mut renderer = Renderer::new(&context, config.debug.renderer)?;\n\n", "metadata": {"chunk_id": "doc_70_chunk_17", "original_index": 17, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_17"}, "type": "Document"} +{"page_content": " // Load font common glyphs to accelerate rendering.\n debug!(\"Filling glyph cache with common glyphs\");\n renderer.with_loader(|mut api| {\n glyph_cache.reset_glyph_cache(&mut api);\n });\n\n let padding = config.window.padding(window.scale_factor as f32);\n let viewport_size = window.inner_size();\n\n // Create new size with at least one column and row.\n let size_info = SizeInfo::new(\n viewport_size.width as f32,\n viewport_size.height as f32,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding && config.window.dimensions().is_none(),\n );\n\n", "metadata": {"chunk_id": "doc_70_chunk_18", "original_index": 18, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_18"}, "type": "Document"} +{"page_content": " info!(\"Cell size: {} x {}\", cell_width, cell_height);\n info!(\"Padding: {} x {}\", size_info.padding_x(), size_info.padding_y());\n info!(\"Width: {}, Height: {}\", size_info.width(), size_info.height());\n\n // Update OpenGL projection.\n renderer.resize(&size_info);\n\n // Clear screen.\n let background_color = config.colors.primary.background;\n renderer.clear(background_color, config.window_opacity());\n\n // Disable shadows for transparent windows on macOS.\n #[cfg(target_os = \"macos\")]\n window.set_has_shadow(config.window_opacity() >= 1.0);\n\n let is_wayland = matches!(raw_window_handle, RawWindowHandle::Wayland(_));\n\n", "metadata": {"chunk_id": "doc_70_chunk_19", "original_index": 19, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_19"}, "type": "Document"} +{"page_content": " // On Wayland we can safely ignore this call, since the window isn't visible until you\n // actually draw something into it and commit those changes.\n if !is_wayland {\n surface.swap_buffers(&context).expect(\"failed to swap buffers.\");\n renderer.finish();\n }\n\n // Set resize increments for the newly created window.\n if config.window.resize_increments {\n window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n window.set_visible(true);\n\n", "metadata": {"chunk_id": "doc_70_chunk_20", "original_index": 20, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_20"}, "type": "Document"} +{"page_content": " #[allow(clippy::single_match)]\n #[cfg(not(windows))]\n if !_tabbed {\n match config.window.startup_mode {\n #[cfg(target_os = \"macos\")]\n StartupMode::SimpleFullscreen => 
window.set_simple_fullscreen(true),\n StartupMode::Maximized if !is_wayland => window.set_maximized(true),\n _ => (),\n }\n }\n\n let hint_state = HintState::new(config.hints.alphabet());\n\n let mut damage_tracker = DamageTracker::new(size_info.screen_lines(), size_info.columns());\n damage_tracker.debug = config.debug.highlight_damage;\n\n // Disable vsync.\n if let Err(err) = surface.set_swap_interval(&context, SwapInterval::DontWait) {\n info!(\"Failed to disable vsync: {}\", err);\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_21", "original_index": 21, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_21"}, "type": "Document"} +{"page_content": " Ok(Self {\n context: ManuallyDrop::new(Replaceable::new(context)),\n visual_bell: VisualBell::from(&config.bell),\n renderer: ManuallyDrop::new(renderer),\n surface: ManuallyDrop::new(surface),\n colors: List::from(&config.colors),\n frame_timer: FrameTimer::new(),\n raw_window_handle,\n damage_tracker,\n glyph_cache,\n hint_state,\n size_info,\n font_size,\n window,\n pending_renderer_update: Default::default(),\n vi_highlighted_hint: Default::default(),\n highlighted_hint: Default::default(),\n hint_mouse_point: Default::default(),\n pending_update: Default::default(),\n cursor_hidden: Default::default(),\n meter: Default::default(),\n ime: Default::default(),\n })\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_22", "original_index": 22, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_22"}, "type": "Document"} +{"page_content": " #[inline]\n pub fn gl_context(&self) -> &PossiblyCurrentContext {\n self.context.get()\n }\n\n pub fn make_not_current(&mut self) {\n if self.context.get().is_current() {\n self.context.replace_with(|context| {\n context\n .make_not_current()\n .expect(\"failed to disable context\")\n .treat_as_possibly_current()\n });\n }\n }\n\n pub fn make_current(&self) {\n if !self.context.get().is_current() {\n self.context.make_current(&self.surface).expect(\"failed to make context current\")\n }\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_23", "original_index": 23, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_23"}, "type": "Document"} +{"page_content": " fn swap_buffers(&self) {\n #[allow(clippy::single_match)]\n let res = match (self.surface.deref(), &self.context.get()) {\n #[cfg(not(any(target_os = \"macos\", windows)))]\n (Surface::Egl(surface), PossiblyCurrentContext::Egl(context))\n if matches!(self.raw_window_handle, RawWindowHandle::Wayland(_))\n && !self.damage_tracker.debug =>\n {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n surface.swap_buffers_with_damage(context, &damage)\n },\n (surface, context) => surface.swap_buffers(context),\n };\n if let Err(err) = res {\n debug!(\"error calling swap_buffers: {}\", err);\n }\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_24", "original_index": 24, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_24"}, "type": "Document"} +{"page_content": " /// Update font size and cell dimensions.\n ///\n /// This will return a tuple of the cell width and height.\n fn update_font_size(\n glyph_cache: &mut GlyphCache,\n config: &UiConfig,\n font: &Font,\n ) -> (f32, f32) {\n let _ = glyph_cache.update_font_size(font);\n\n // Compute new cell sizes.\n compute_cell_size(config, &glyph_cache.font_metrics())\n }\n\n /// Reset glyph cache.\n fn reset_glyph_cache(&mut self) {\n let cache = &mut self.glyph_cache;\n self.renderer.with_loader(|mut api| 
{\n cache.reset_glyph_cache(&mut api);\n });\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_25", "original_index": 25, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_25"}, "type": "Document"}
+{"page_content": " // XXX: this function must not call to any `OpenGL` related tasks. Renderer updates are\n // performed in [`Self::process_renderer_update`] right before drawing.\n //\n /// Process update events.\n pub fn handle_update<T>(\n &mut self,\n terminal: &mut Term<T>,\n pty_resize_handle: &mut dyn OnResize,\n message_buffer: &MessageBuffer,\n search_state: &mut SearchState,\n config: &UiConfig,\n ) where\n T: EventListener,\n {\n let pending_update = mem::take(&mut self.pending_update);\n\n", "metadata": {"chunk_id": "doc_70_chunk_26", "original_index": 26, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_26"}, "type": "Document"}
+{"page_content": " let (mut cell_width, mut cell_height) =\n (self.size_info.cell_width(), self.size_info.cell_height());\n\n if pending_update.font().is_some() || pending_update.cursor_dirty() {\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.clear_font_cache = true\n }\n\n // Update font size and cell dimensions.\n if let Some(font) = pending_update.font() {\n let cell_dimensions = Self::update_font_size(&mut self.glyph_cache, config, font);\n cell_width = cell_dimensions.0;\n cell_height = cell_dimensions.1;\n\n", "metadata": {"chunk_id": "doc_70_chunk_27", "original_index": 27, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_27"}, "type": "Document"}
+{"page_content": " info!(\"Cell size: {} x {}\", cell_width, cell_height);\n\n // Mark entire terminal as damaged since glyph size could change without cell size\n // changes.\n self.damage_tracker.frame().mark_fully_damaged();\n }\n\n let (mut width, mut height) = (self.size_info.width(), self.size_info.height());\n if let Some(dimensions) = pending_update.dimensions() {\n width = dimensions.width as f32;\n height = dimensions.height as f32;\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_28", "original_index": 28, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_28"}, "type": "Document"}
+{"page_content": " let padding = config.window.padding(self.window.scale_factor as f32);\n\n let mut new_size = SizeInfo::new(\n width,\n height,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding,\n );\n\n // Update number of column/lines in the viewport.\n let search_active = search_state.history_index.is_some();\n let message_bar_lines = message_buffer.message().map_or(0, |m| m.text(&new_size).len());\n let search_lines = usize::from(search_active);\n new_size.reserve_lines(message_bar_lines + search_lines);\n\n", "metadata": {"chunk_id": "doc_70_chunk_29", "original_index": 29, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_29"}, "type": "Document"}
+{"page_content": " // Update resize increments.\n if config.window.resize_increments {\n self.window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n // Resize when terminal when its dimensions have changed.\n if self.size_info.screen_lines() != new_size.screen_lines\n || self.size_info.columns() != new_size.columns()\n {\n // Resize PTY.\n pty_resize_handle.on_resize(new_size.into());\n\n", "metadata": {"chunk_id": "doc_70_chunk_30", "original_index": 30, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_30"}, 
"type": "Document"} +{"page_content": " // Resize terminal.\n terminal.resize(new_size);\n\n // Resize damage tracking.\n self.damage_tracker.resize(new_size.screen_lines(), new_size.columns());\n }\n\n // Check if dimensions have changed.\n if new_size != self.size_info {\n // Queue renderer update.\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.resize = true;\n\n // Clear focused search match.\n search_state.clear_focused_match();\n }\n self.size_info = new_size;\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_31", "original_index": 31, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_31"}, "type": "Document"} +{"page_content": " // NOTE: Renderer updates are split off, since platforms like Wayland require resize and other\n // OpenGL operations to be performed right before rendering. Otherwise they could lock the\n // back buffer and render with the previous state. This also solves flickering during resizes.\n //\n /// Update the state of the renderer.\n pub fn process_renderer_update(&mut self) {\n let renderer_update = match self.pending_renderer_update.take() {\n Some(renderer_update) => renderer_update,\n _ => return,\n };\n\n", "metadata": {"chunk_id": "doc_70_chunk_32", "original_index": 32, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_32"}, "type": "Document"} +{"page_content": " // Resize renderer.\n if renderer_update.resize {\n let width = NonZeroU32::new(self.size_info.width() as u32).unwrap();\n let height = NonZeroU32::new(self.size_info.height() as u32).unwrap();\n self.surface.resize(&self.context, width, height);\n }\n\n // Ensure we're modifying the correct OpenGL context.\n self.make_current();\n\n if renderer_update.clear_font_cache {\n self.reset_glyph_cache();\n }\n\n self.renderer.resize(&self.size_info);\n\n info!(\"Padding: {} x {}\", self.size_info.padding_x(), self.size_info.padding_y());\n info!(\"Width: {}, Height: {}\", self.size_info.width(), self.size_info.height());\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_33", "original_index": 33, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_33"}, "type": "Document"} +{"page_content": " /// Draw the screen.\n ///\n /// A reference to Term whose state is being drawn must be provided.\n ///\n /// This call may block if vsync is enabled.\n pub fn draw(\n &mut self,\n mut terminal: MutexGuard<'_, Term>,\n scheduler: &mut Scheduler,\n message_buffer: &MessageBuffer,\n config: &UiConfig,\n search_state: &mut SearchState,\n ) {\n // Collect renderable content before the terminal is dropped.\n let mut content = RenderableContent::new(config, self, &terminal, search_state);\n", "metadata": {"chunk_id": "doc_70_chunk_34", "original_index": 34, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_34"}, "type": "Document"} +{"page_content": " let mut grid_cells = Vec::new();\n for cell in &mut content {\n grid_cells.push(cell);\n }\n let selection_range = content.selection_range();\n let foreground_color = content.color(NamedColor::Foreground as usize);\n let background_color = content.color(NamedColor::Background as usize);\n let display_offset = content.display_offset();\n let cursor = content.cursor();\n\n let cursor_point = terminal.grid().cursor.point;\n let total_lines = terminal.grid().total_lines();\n let metrics = self.glyph_cache.font_metrics();\n let size_info = self.size_info;\n\n let vi_mode = terminal.mode().contains(TermMode::VI);\n let vi_cursor_point 
= if vi_mode { Some(terminal.vi_mode_cursor.point) } else { None };\n\n", "metadata": {"chunk_id": "doc_70_chunk_35", "original_index": 35, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_35"}, "type": "Document"} +{"page_content": " // Add damage from the terminal.\n if self.collect_damage() {\n match terminal.damage() {\n TermDamage::Full => self.damage_tracker.frame().mark_fully_damaged(),\n TermDamage::Partial(damaged_lines) => {\n for damage in damaged_lines {\n self.damage_tracker.frame().damage_line(damage);\n }\n },\n }\n terminal.reset_damage();\n }\n\n // Drop terminal as early as possible to free lock.\n drop(terminal);\n\n // Add damage from alacritty's UI elements overlapping terminal.\n if self.collect_damage() {\n let requires_full_damage = self.visual_bell.intensity() != 0.\n || self.hint_state.active()\n || search_state.regex().is_some();\n\n", "metadata": {"chunk_id": "doc_70_chunk_36", "original_index": 36, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_36"}, "type": "Document"} +{"page_content": " if requires_full_damage {\n self.damage_tracker.frame().mark_fully_damaged();\n self.damage_tracker.next_frame().mark_fully_damaged();\n }\n\n let vi_cursor_viewport_point =\n vi_cursor_point.and_then(|cursor| point_to_viewport(display_offset, cursor));\n\n self.damage_tracker.damage_vi_cursor(vi_cursor_viewport_point);\n self.damage_tracker.damage_selection(selection_range, display_offset);\n }\n\n // Make sure this window's OpenGL context is active.\n self.make_current();\n\n self.renderer.clear(background_color, config.window_opacity());\n let mut lines = RenderLines::new();\n\n // Optimize loop hint comparator.\n let has_highlighted_hint =\n self.highlighted_hint.is_some() || self.vi_highlighted_hint.is_some();\n\n", "metadata": {"chunk_id": "doc_70_chunk_37", "original_index": 37, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_37"}, "type": "Document"} +{"page_content": " // Draw grid.\n {\n let _sampler = self.meter.sampler();\n\n // Ensure macOS hasn't reset our viewport.\n #[cfg(target_os = \"macos\")]\n self.renderer.set_viewport(&size_info);\n\n let glyph_cache = &mut self.glyph_cache;\n let highlighted_hint = &self.highlighted_hint;\n let vi_highlighted_hint = &self.vi_highlighted_hint;\n let damage_tracker = &mut self.damage_tracker;\n\n self.renderer.draw_cells(\n &size_info,\n glyph_cache,\n grid_cells.into_iter().map(|mut cell| {\n // Underline hints hovered by mouse or vi mode cursor.\n let point = term::viewport_to_point(display_offset, cell.point);\n\n", "metadata": {"chunk_id": "doc_70_chunk_38", "original_index": 38, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_38"}, "type": "Document"} +{"page_content": " if has_highlighted_hint {\n let hyperlink =\n cell.extra.as_ref().and_then(|extra| extra.hyperlink.as_ref());\n if highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n || vi_highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n {\n cell.flags.insert(Flags::UNDERLINE);\n // Damage hints for the current and next frames.\n damage_tracker.frame().damage_point(cell.point);\n damage_tracker.next_frame().damage_point(cell.point);\n }\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_39", "original_index": 39, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_39"}, "type": "Document"} +{"page_content": " // Update underline/strikeout.\n 
lines.update(&cell);\n\n cell\n }),\n );\n }\n\n let mut rects = lines.rects(&metrics, &size_info);\n\n if let Some(vi_cursor_point) = vi_cursor_point {\n // Indicate vi mode by showing the cursor's position in the top right corner.\n let line = (-vi_cursor_point.line.0 + size_info.bottommost_line().0) as usize;\n let obstructed_column = Some(vi_cursor_point)\n .filter(|point| point.line == -(display_offset as i32))\n .map(|point| point.column);\n self.draw_line_indicator(config, total_lines, obstructed_column, line);\n } else if search_state.regex().is_some() {\n // Show current display offset in vi-less search to indicate match position.\n self.draw_line_indicator(config, total_lines, None, display_offset);\n };\n\n", "metadata": {"chunk_id": "doc_70_chunk_40", "original_index": 40, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_40"}, "type": "Document"} +{"page_content": " // Draw cursor.\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n\n // Push visual bell after url/underline/strikeout rects.\n let visual_bell_intensity = self.visual_bell.intensity();\n if visual_bell_intensity != 0. {\n let visual_bell_rect = RenderRect::new(\n 0.,\n 0.,\n size_info.width(),\n size_info.height(),\n config.bell.color,\n visual_bell_intensity as f32,\n );\n rects.push(visual_bell_rect);\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_41", "original_index": 41, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_41"}, "type": "Document"} +{"page_content": " // Handle IME positioning and search bar rendering.\n let ime_position = match search_state.regex() {\n Some(regex) => {\n let search_label = match search_state.direction() {\n Direction::Right => FORWARD_SEARCH_LABEL,\n Direction::Left => BACKWARD_SEARCH_LABEL,\n };\n\n let search_text = Self::format_search(regex, search_label, size_info.columns());\n\n // Render the search bar.\n self.draw_search(config, &search_text);\n\n // Draw search bar cursor.\n let line = size_info.screen_lines();\n let column = Column(search_text.chars().count() - 1);\n\n", "metadata": {"chunk_id": "doc_70_chunk_42", "original_index": 42, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_42"}, "type": "Document"} +{"page_content": " // Add cursor to search bar if IME is not active.\n if self.ime.preedit().is_none() {\n let fg = config.colors.footer_bar_foreground();\n let shape = CursorShape::Underline;\n let cursor = RenderableCursor::new(Point::new(line, column), shape, fg, false);\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n }\n\n Some(Point::new(line, column))\n },\n None => {\n let num_lines = self.size_info.screen_lines();\n term::point_to_viewport(display_offset, cursor_point)\n .filter(|point| point.line < num_lines)\n },\n };\n\n", "metadata": {"chunk_id": "doc_70_chunk_43", "original_index": 43, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_43"}, "type": "Document"} +{"page_content": " // Handle IME.\n if self.ime.is_enabled() {\n if let Some(point) = ime_position {\n let (fg, bg) = if search_state.regex().is_some() {\n (config.colors.footer_bar_foreground(), config.colors.footer_bar_background())\n } else {\n (foreground_color, background_color)\n };\n\n self.draw_ime_preview(point, fg, bg, &mut rects, config);\n }\n }\n\n if let Some(message) = message_buffer.message() {\n let search_offset = usize::from(search_state.regex().is_some());\n let text = message.text(&size_info);\n\n", "metadata": {"chunk_id": 
"doc_70_chunk_44", "original_index": 44, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_44"}, "type": "Document"} +{"page_content": " // Create a new rectangle for the background.\n let start_line = size_info.screen_lines() + search_offset;\n let y = size_info.cell_height().mul_add(start_line as f32, size_info.padding_y());\n\n let bg = match message.ty() {\n MessageType::Error => config.colors.normal.red,\n MessageType::Warning => config.colors.normal.yellow,\n };\n\n", "metadata": {"chunk_id": "doc_70_chunk_45", "original_index": 45, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_45"}, "type": "Document"} +{"page_content": " let x = 0;\n let width = size_info.width() as i32;\n let height = (size_info.height() - y) as i32;\n let message_bar_rect =\n RenderRect::new(x as f32, y, width as f32, height as f32, bg, 1.);\n\n // Push message_bar in the end, so it'll be above all other content.\n rects.push(message_bar_rect);\n\n // Always damage message bar, since it could have messages of the same size in it.\n self.damage_tracker.frame().add_viewport_rect(&size_info, x, y as i32, width, height);\n\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n\n", "metadata": {"chunk_id": "doc_70_chunk_46", "original_index": 46, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_46"}, "type": "Document"} +{"page_content": " // Relay messages to the user.\n let glyph_cache = &mut self.glyph_cache;\n let fg = config.colors.primary.background;\n for (i, message_text) in text.iter().enumerate() {\n let point = Point::new(start_line + i, Column(0));\n self.renderer.draw_string(\n point,\n fg,\n bg,\n message_text.chars(),\n &size_info,\n glyph_cache,\n );\n }\n } else {\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_47", "original_index": 47, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_47"}, "type": "Document"} +{"page_content": " self.draw_render_timer(config);\n\n // Draw hyperlink uri preview.\n if has_highlighted_hint {\n let cursor_point = vi_cursor_point.or(Some(cursor_point));\n self.draw_hyperlink_preview(config, cursor_point, display_offset);\n }\n\n // Notify winit that we're about to present.\n self.window.pre_present_notify();\n\n // Highlight damage for debugging.\n if self.damage_tracker.debug {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n let mut rects = Vec::with_capacity(damage.len());\n self.highlight_damage(&mut rects);\n self.renderer.draw_rects(&self.size_info, &metrics, rects);\n }\n\n // Clearing debug highlights from the previous frame requires full redraw.\n self.swap_buffers();\n\n", "metadata": {"chunk_id": "doc_70_chunk_48", "original_index": 48, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_48"}, "type": "Document"} +{"page_content": " if matches!(self.raw_window_handle, RawWindowHandle::Xcb(_) | RawWindowHandle::Xlib(_)) {\n // On X11 `swap_buffers` does not block for vsync. 
However the next OpenGl command\n // will block to synchronize (this is `glClear` in Alacritty), which causes a\n // permanent one frame delay.\n self.renderer.finish();\n }\n\n // XXX: Request the new frame after swapping buffers, so the\n // time to finish OpenGL operations is accounted for in the timeout.\n if !matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) {\n self.request_frame(scheduler);\n }\n\n self.damage_tracker.swap_damage();\n }\n\n /// Update to a new configuration.\n pub fn update_config(&mut self, config: &UiConfig) {\n self.damage_tracker.debug = config.debug.highlight_damage;\n self.visual_bell.update_config(&config.bell);\n self.colors = List::from(&config.colors);\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_49", "original_index": 49, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_49"}, "type": "Document"} +{"page_content": " /// Update the mouse/vi mode cursor hint highlighting.\n ///\n /// This will return whether the highlighted hints changed.\n pub fn update_highlighted_hints(\n &mut self,\n term: &Term,\n config: &UiConfig,\n mouse: &Mouse,\n modifiers: ModifiersState,\n ) -> bool {\n // Update vi mode cursor hint.\n let vi_highlighted_hint = if term.mode().contains(TermMode::VI) {\n let mods = ModifiersState::all();\n let point = term.vi_mode_cursor.point;\n hint::highlighted_at(term, config, point, mods)\n } else {\n None\n };\n let mut dirty = vi_highlighted_hint != self.vi_highlighted_hint;\n self.vi_highlighted_hint = vi_highlighted_hint;\n\n", "metadata": {"chunk_id": "doc_70_chunk_50", "original_index": 50, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_50"}, "type": "Document"} +{"page_content": " // Abort if mouse highlighting conditions are not met.\n if !mouse.inside_text_area || !term.selection.as_ref().map_or(true, Selection::is_empty) {\n dirty |= self.highlighted_hint.is_some();\n self.highlighted_hint = None;\n return dirty;\n }\n\n // Find highlighted hint at mouse position.\n let point = mouse.point(&self.size_info, term.grid().display_offset());\n let highlighted_hint = hint::highlighted_at(term, config, point, modifiers);\n\n", "metadata": {"chunk_id": "doc_70_chunk_51", "original_index": 51, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_51"}, "type": "Document"} +{"page_content": " // Update cursor shape.\n if highlighted_hint.is_some() {\n // If mouse changed the line, we should update the hyperlink preview, since the\n // highlighted hint could be disrupted by the old preview.\n dirty = self.hint_mouse_point.map_or(false, |p| p.line != point.line);\n self.hint_mouse_point = Some(point);\n self.window.set_mouse_cursor(CursorIcon::Pointer);\n } else if self.highlighted_hint.is_some() {\n self.hint_mouse_point = None;\n if term.mode().intersects(TermMode::MOUSE_MODE) && !term.mode().contains(TermMode::VI) {\n self.window.set_mouse_cursor(CursorIcon::Default);\n } else {\n self.window.set_mouse_cursor(CursorIcon::Text);\n }\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_52", "original_index": 52, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_52"}, "type": "Document"} +{"page_content": " dirty |= self.highlighted_hint != highlighted_hint;\n self.highlighted_hint = highlighted_hint;\n\n dirty\n }\n\n #[inline(never)]\n fn draw_ime_preview(\n &mut self,\n point: Point,\n fg: Rgb,\n bg: Rgb,\n rects: &mut Vec,\n config: &UiConfig,\n ) {\n let preedit = match self.ime.preedit() {\n Some(preedit) => preedit,\n None => {\n // In 
case we don't have preedit, just set the popup point.\n self.window.update_ime_position(point, &self.size_info);\n return;\n },\n };\n\n let num_cols = self.size_info.columns();\n\n", "metadata": {"chunk_id": "doc_70_chunk_53", "original_index": 53, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_53"}, "type": "Document"} +{"page_content": " // Get the visible preedit.\n let visible_text: String = match (preedit.cursor_byte_offset, preedit.cursor_end_offset) {\n (Some(byte_offset), Some(end_offset)) if end_offset > num_cols => StrShortener::new(\n &preedit.text[byte_offset..],\n num_cols,\n ShortenDirection::Right,\n Some(SHORTENER),\n ),\n _ => {\n StrShortener::new(&preedit.text, num_cols, ShortenDirection::Left, Some(SHORTENER))\n },\n }\n .collect();\n\n", "metadata": {"chunk_id": "doc_70_chunk_54", "original_index": 54, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_54"}, "type": "Document"} +{"page_content": " let visible_len = visible_text.chars().count();\n\n let end = cmp::min(point.column.0 + visible_len, num_cols);\n let start = end.saturating_sub(visible_len);\n\n let start = Point::new(point.line, Column(start));\n let end = Point::new(point.line, Column(end - 1));\n\n let glyph_cache = &mut self.glyph_cache;\n let metrics = glyph_cache.font_metrics();\n\n self.renderer.draw_string(\n start,\n fg,\n bg,\n visible_text.chars(),\n &self.size_info,\n glyph_cache,\n );\n\n // Damage preedit inside the terminal viewport.\n if self.collect_damage() && point.line < self.size_info.screen_lines() {\n let damage = LineDamageBounds::new(start.line, 0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_55", "original_index": 55, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_55"}, "type": "Document"} +{"page_content": " // Add underline for preedit text.\n let underline = RenderLine { start, end, color: fg };\n rects.extend(underline.rects(Flags::UNDERLINE, &metrics, &self.size_info));\n\n let ime_popup_point = match preedit.cursor_end_offset {\n Some(cursor_end_offset) if cursor_end_offset != 0 => {\n let is_wide = preedit.text[preedit.cursor_byte_offset.unwrap_or_default()..]\n .chars()\n .next()\n .map(|ch| ch.width() == Some(2))\n .unwrap_or_default();\n\n", "metadata": {"chunk_id": "doc_70_chunk_56", "original_index": 56, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_56"}, "type": "Document"} +{"page_content": " let cursor_column = Column(\n (end.column.0 as isize - cursor_end_offset as isize + 1).max(0) as usize,\n );\n let cursor_point = Point::new(point.line, cursor_column);\n let cursor =\n RenderableCursor::new(cursor_point, CursorShape::HollowBlock, fg, is_wide);\n rects.extend(cursor.rects(&self.size_info, config.cursor.thickness()));\n cursor_point\n },\n _ => end,\n };\n\n", "metadata": {"chunk_id": "doc_70_chunk_57", "original_index": 57, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_57"}, "type": "Document"} +{"page_content": " self.window.update_ime_position(ime_popup_point, &self.size_info);\n }\n\n /// Format search regex to account for the cursor and fullwidth characters.\n fn format_search(search_regex: &str, search_label: &str, max_width: usize) -> String {\n let label_len = search_label.len();\n\n // Skip `search_regex` formatting if only label is visible.\n if label_len > max_width {\n return 
search_label[..max_width].to_owned();\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_58", "original_index": 58, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_58"}, "type": "Document"} +{"page_content": " // The search string consists of `search_label` + `search_regex` + `cursor`.\n let mut bar_text = String::from(search_label);\n bar_text.extend(StrShortener::new(\n search_regex,\n max_width.wrapping_sub(label_len + 1),\n ShortenDirection::Left,\n Some(SHORTENER),\n ));\n\n // Add place for cursor.\n bar_text.push(' ');\n\n bar_text\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_59", "original_index": 59, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_59"}, "type": "Document"} +{"page_content": " /// Draw preview for the currently highlighted `Hyperlink`.\n #[inline(never)]\n fn draw_hyperlink_preview(\n &mut self,\n config: &UiConfig,\n cursor_point: Option,\n display_offset: usize,\n ) {\n let num_cols = self.size_info.columns();\n let uris: Vec<_> = self\n .highlighted_hint\n .iter()\n .chain(&self.vi_highlighted_hint)\n .filter_map(|hint| hint.hyperlink().map(|hyperlink| hyperlink.uri()))\n .map(|uri| StrShortener::new(uri, num_cols, ShortenDirection::Right, Some(SHORTENER)))\n .collect();\n\n", "metadata": {"chunk_id": "doc_70_chunk_60", "original_index": 60, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_60"}, "type": "Document"} +{"page_content": " if uris.is_empty() {\n return;\n }\n\n // The maximum amount of protected lines including the ones we'll show preview on.\n let max_protected_lines = uris.len() * 2;\n\n // Lines we shouldn't show preview on, because it'll obscure the highlighted hint.\n let mut protected_lines = Vec::with_capacity(max_protected_lines);\n if self.size_info.screen_lines() > max_protected_lines {\n // Prefer to show preview even when it'll likely obscure the highlighted hint, when\n // there's no place left for it.\n protected_lines.push(self.hint_mouse_point.map(|point| point.line));\n protected_lines.push(cursor_point.map(|point| point.line));\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_61", "original_index": 61, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_61"}, "type": "Document"} +{"page_content": " // Find the line in viewport we can draw preview on without obscuring protected lines.\n let viewport_bottom = self.size_info.bottommost_line() - Line(display_offset as i32);\n let viewport_top = viewport_bottom - (self.size_info.screen_lines() - 1);\n let uri_lines = (viewport_top.0..=viewport_bottom.0)\n .rev()\n .map(|line| Some(Line(line)))\n .filter_map(|line| {\n if protected_lines.contains(&line) {\n None\n } else {\n protected_lines.push(line);\n line\n }\n })\n .take(uris.len())\n .flat_map(|line| term::point_to_viewport(display_offset, Point::new(line, Column(0))));\n\n", "metadata": {"chunk_id": "doc_70_chunk_62", "original_index": 62, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_62"}, "type": "Document"} +{"page_content": " let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n for (uri, point) in uris.into_iter().zip(uri_lines) {\n // Damage the uri preview.\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n\n // Damage the uri preview for the next frame as well.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n 
self.renderer.draw_string(point, fg, bg, uri, &self.size_info, &mut self.glyph_cache);\n }\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_63", "original_index": 63, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_63"}, "type": "Document"} +{"page_content": " /// Draw current search regex.\n #[inline(never)]\n fn draw_search(&mut self, config: &UiConfig, text: &str) {\n // Assure text length is at least num_cols.\n let num_cols = self.size_info.columns();\n let text = format!(\"{:<1$}\", text, num_cols);\n\n let point = Point::new(self.size_info.screen_lines(), Column(0));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n\n", "metadata": {"chunk_id": "doc_70_chunk_64", "original_index": 64, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_64"}, "type": "Document"} +{"page_content": " self.renderer.draw_string(\n point,\n fg,\n bg,\n text.chars(),\n &self.size_info,\n &mut self.glyph_cache,\n );\n }\n\n /// Draw render timer.\n #[inline(never)]\n fn draw_render_timer(&mut self, config: &UiConfig) {\n if !config.debug.render_timer {\n return;\n }\n\n let timing = format!(\"{:.3} usec\", self.meter.average());\n let point = Point::new(self.size_info.screen_lines().saturating_sub(2), Column(0));\n let fg = config.colors.primary.background;\n let bg = config.colors.normal.red;\n\n", "metadata": {"chunk_id": "doc_70_chunk_65", "original_index": 65, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_65"}, "type": "Document"} +{"page_content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, timing.len());\n self.damage_tracker.frame().damage_line(damage);\n // Damage the render timer for the next frame.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, timing.chars(), &self.size_info, glyph_cache);\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_66", "original_index": 66, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_66"}, "type": "Document"} +{"page_content": " /// Draw an indicator for the position of a line in history.\n #[inline(never)]\n fn draw_line_indicator(\n &mut self,\n config: &UiConfig,\n total_lines: usize,\n obstructed_column: Option,\n line: usize,\n ) {\n let columns = self.size_info.columns();\n let text = format!(\"[{}/{}]\", line, total_lines - 1);\n let column = Column(self.size_info.columns().saturating_sub(text.len()));\n let point = Point::new(0, column);\n\n", "metadata": {"chunk_id": "doc_70_chunk_67", "original_index": 67, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_67"}, "type": "Document"} +{"page_content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, columns - 1);\n self.damage_tracker.frame().damage_line(damage);\n // Damage it on the next frame in case it goes away.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let colors = &config.colors;\n let fg = colors.line_indicator.foreground.unwrap_or(colors.primary.background);\n let bg = colors.line_indicator.background.unwrap_or(colors.primary.foreground);\n\n", "metadata": {"chunk_id": "doc_70_chunk_68", "original_index": 68, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_68"}, "type": "Document"} +{"page_content": " // Do not render anything if it would obscure the vi mode cursor.\n if 
obstructed_column.map_or(true, |obstructed_column| obstructed_column < column) {\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, text.chars(), &self.size_info, glyph_cache);\n }\n }\n\n /// Returns `true` if damage information should be collected, `false` otherwise.\n #[inline]\n fn collect_damage(&self) -> bool {\n matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) || self.damage_tracker.debug\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_69", "original_index": 69, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_69"}, "type": "Document"} +{"page_content": " /// Highlight damaged rects.\n ///\n /// This function is for debug purposes only.\n fn highlight_damage(&self, render_rects: &mut Vec) {\n for damage_rect in &self.damage_tracker.shape_frame_damage(self.size_info.into()) {\n let x = damage_rect.x as f32;\n let height = damage_rect.height as f32;\n let width = damage_rect.width as f32;\n let y = damage_y_to_viewport_y(&self.size_info, damage_rect) as f32;\n let render_rect = RenderRect::new(x, y, width, height, DAMAGE_RECT_COLOR, 0.5);\n\n", "metadata": {"chunk_id": "doc_70_chunk_70", "original_index": 70, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_70"}, "type": "Document"} +{"page_content": " render_rects.push(render_rect);\n }\n }\n\n /// Request a new frame for a window on Wayland.\n fn request_frame(&mut self, scheduler: &mut Scheduler) {\n // Mark that we've used a frame.\n self.window.has_frame = false;\n\n // Get the display vblank interval.\n let monitor_vblank_interval = 1_000_000.\n / self\n .window\n .current_monitor()\n .and_then(|monitor| monitor.refresh_rate_millihertz())\n .unwrap_or(60_000) as f64;\n\n", "metadata": {"chunk_id": "doc_70_chunk_71", "original_index": 71, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_71"}, "type": "Document"} +{"page_content": " // Now convert it to micro seconds.\n let monitor_vblank_interval =\n Duration::from_micros((1000. 
* monitor_vblank_interval) as u64);\n\n let swap_timeout = self.frame_timer.compute_timeout(monitor_vblank_interval);\n\n let window_id = self.window.id();\n let timer_id = TimerId::new(Topic::Frame, window_id);\n let event = Event::new(EventType::Frame, window_id);\n\n scheduler.schedule(event, swap_timeout, false, timer_id);\n }\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_72", "original_index": 72, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_72"}, "type": "Document"} +{"page_content": "impl Drop for Display {\n fn drop(&mut self) {\n // Switch OpenGL context before dropping, otherwise objects (like programs) from other\n // contexts might be deleted when dropping renderer.\n self.make_current();\n unsafe {\n ManuallyDrop::drop(&mut self.renderer);\n ManuallyDrop::drop(&mut self.context);\n ManuallyDrop::drop(&mut self.surface);\n }\n }\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_73", "original_index": 73, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_73"}, "type": "Document"} +{"page_content": "/// Input method state.\n#[derive(Debug, Default)]\npub struct Ime {\n /// Whether the IME is enabled.\n enabled: bool,\n\n /// Current IME preedit.\n preedit: Option,\n}\n\nimpl Ime {\n #[inline]\n pub fn set_enabled(&mut self, is_enabled: bool) {\n if is_enabled {\n self.enabled = is_enabled\n } else {\n // Clear state when disabling IME.\n *self = Default::default();\n }\n }\n\n #[inline]\n pub fn is_enabled(&self) -> bool {\n self.enabled\n }\n\n #[inline]\n pub fn set_preedit(&mut self, preedit: Option) {\n self.preedit = preedit;\n }\n\n #[inline]\n pub fn preedit(&self) -> Option<&Preedit> {\n self.preedit.as_ref()\n }\n}\n\n#[derive(Debug, Default, PartialEq, Eq)]\npub struct Preedit {\n /// The preedit text.\n text: String,\n\n", "metadata": {"chunk_id": "doc_70_chunk_74", "original_index": 74, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_74"}, "type": "Document"} +{"page_content": " /// Byte offset for cursor start into the preedit text.\n ///\n /// `None` means that the cursor is invisible.\n cursor_byte_offset: Option,\n\n /// The cursor offset from the end of the preedit in char width.\n cursor_end_offset: Option,\n}\n\nimpl Preedit {\n pub fn new(text: String, cursor_byte_offset: Option) -> Self {\n let cursor_end_offset = if let Some(byte_offset) = cursor_byte_offset {\n // Convert byte offset into char offset.\n let cursor_end_offset =\n text[byte_offset..].chars().fold(0, |acc, ch| acc + ch.width().unwrap_or(1));\n\n", "metadata": {"chunk_id": "doc_70_chunk_75", "original_index": 75, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_75"}, "type": "Document"} +{"page_content": " Some(cursor_end_offset)\n } else {\n None\n };\n\n Self { text, cursor_byte_offset, cursor_end_offset }\n }\n}\n\n/// Pending renderer updates.\n///\n/// All renderer updates are cached to be applied just before rendering, to avoid platform-specific\n/// rendering issues.\n#[derive(Debug, Default, Copy, Clone)]\npub struct RendererUpdate {\n /// Should resize the window.\n resize: bool,\n\n /// Clear font caches.\n clear_font_cache: bool,\n}\n\n/// Struct for safe in-place replacement.\n///\n/// This struct allows easily replacing struct fields that provide `self -> Self` methods in-place,\n/// without having to deal with constantly unwrapping the underlying [`Option`].\nstruct Replaceable(Option);\n\n", "metadata": {"chunk_id": "doc_70_chunk_76", "original_index": 76, "pid": 
"96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_76"}, "type": "Document"} +{"page_content": "impl Replaceable {\n pub fn new(inner: T) -> Self {\n Self(Some(inner))\n }\n\n /// Replace the contents of the container.\n pub fn replace_with T>(&mut self, f: F) {\n self.0 = self.0.take().map(f);\n }\n\n /// Get immutable access to the wrapped value.\n pub fn get(&self) -> &T {\n self.0.as_ref().unwrap()\n }\n\n /// Get mutable access to the wrapped value.\n pub fn get_mut(&mut self) -> &mut T {\n self.0.as_mut().unwrap()\n }\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_77", "original_index": 77, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_77"}, "type": "Document"} +{"page_content": "impl Deref for Replaceable {\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n self.get()\n }\n}\n\nimpl DerefMut for Replaceable {\n fn deref_mut(&mut self) -> &mut Self::Target {\n self.get_mut()\n }\n}\n\n/// The frame timer state.\npub struct FrameTimer {\n /// Base timestamp used to compute sync points.\n base: Instant,\n\n /// The last timestamp we synced to.\n last_synced_timestamp: Instant,\n\n /// The refresh rate we've used to compute sync timestamps.\n refresh_interval: Duration,\n}\n\nimpl FrameTimer {\n pub fn new() -> Self {\n let now = Instant::now();\n Self { base: now, last_synced_timestamp: now, refresh_interval: Duration::ZERO }\n }\n\n", "metadata": {"chunk_id": "doc_70_chunk_78", "original_index": 78, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_78"}, "type": "Document"} +{"page_content": " /// Compute the delay that we should use to achieve the target frame\n /// rate.\n pub fn compute_timeout(&mut self, refresh_interval: Duration) -> Duration {\n let now = Instant::now();\n\n // Handle refresh rate change.\n if self.refresh_interval != refresh_interval {\n self.base = now;\n self.last_synced_timestamp = now;\n self.refresh_interval = refresh_interval;\n return refresh_interval;\n }\n\n let next_frame = self.last_synced_timestamp + self.refresh_interval;\n\n", "metadata": {"chunk_id": "doc_70_chunk_79", "original_index": 79, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_79"}, "type": "Document"} +{"page_content": " if next_frame < now {\n // Redraw immediately if we haven't drawn in over `refresh_interval` microseconds.\n let elapsed_micros = (now - self.base).as_micros() as u64;\n let refresh_micros = self.refresh_interval.as_micros() as u64;\n self.last_synced_timestamp =\n now - Duration::from_micros(elapsed_micros % refresh_micros);\n Duration::ZERO\n } else {\n // Redraw on the next `refresh_interval` clock tick.\n self.last_synced_timestamp = next_frame;\n next_frame - now\n }\n }\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_80", "original_index": 80, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_80"}, "type": "Document"} +{"page_content": "/// Calculate the cell dimensions based on font metrics.\n///\n/// This will return a tuple of the cell width and height.\n#[inline]\nfn compute_cell_size(config: &UiConfig, metrics: &crossfont::Metrics) -> (f32, f32) {\n let offset_x = f64::from(config.font.offset.x);\n let offset_y = f64::from(config.font.offset.y);\n (\n (metrics.average_advance + offset_x).floor().max(1.) as f32,\n (metrics.line_height + offset_y).floor().max(1.) 
as f32,\n )\n}\n\n", "metadata": {"chunk_id": "doc_70_chunk_81", "original_index": 81, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_81"}, "type": "Document"} +{"page_content": "/// Calculate the size of the window given padding, terminal dimensions and cell size.\nfn window_size(\n config: &UiConfig,\n dimensions: Dimensions,\n cell_width: f32,\n cell_height: f32,\n scale_factor: f32,\n) -> PhysicalSize {\n let padding = config.window.padding(scale_factor);\n\n let grid_width = cell_width * dimensions.columns.max(MIN_COLUMNS) as f32;\n let grid_height = cell_height * dimensions.lines.max(MIN_SCREEN_LINES) as f32;\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n PhysicalSize::new(width as u32, height as u32)\n}\n", "metadata": {"chunk_id": "doc_70_chunk_82", "original_index": 82, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_82"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "metadata": {"chunk_id": "doc_71_chunk_0", "original_index": 0, "pid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_0"}, "type": "Document"} +{"page_content": "package org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol extends WithParams {\n Param WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "metadata": {"chunk_id": "doc_71_chunk_1", "original_index": 1, "pid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_1"}, "type": "Document"} +{"page_content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n", "metadata": {"chunk_id": "doc_72_chunk_0", "original_index": 0, "pid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_0"}, "type": "Document"} +{"page_content": "from typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n", "metadata": {"chunk_id": "doc_72_chunk_1", "original_index": 1, "pid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_1"}, "type": "Document"} +{"page_content": " \"\"\"\n Params for :class:`VectorSlicer`.\n \"\"\"\n\n INDICES: Param[Tuple[int, ...]] = IntArrayParam(\n \"indices\",\n \"An array of indices to select features from a vector column.\",\n None,\n indices_validator(None))\n\n def __init__(self, java_params):\n super(_VectorSlicerParams, self).__init__(java_params)\n\n def set_indices(self, *ind: int):\n return self.set(self.INDICES, ind)\n\n def get_indices(self) -> Tuple[int, ...]:\n return self.get(self.INDICES)\n\n @property\n def indices(self) -> Tuple[int, ...]:\n return self.get_indices()\n\n", "metadata": {"chunk_id": "doc_72_chunk_2", "original_index": 2, "pid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_2"}, "type": "Document"} +{"page_content": "\nclass VectorSlicer(JavaFeatureTransformer, _VectorSlicerParams):\n \"\"\"\n A Transformer that transforms a vector to a new feature, which is a sub-array of\n the original feature.It is useful for extracting features from a given vector.\n\n Note that duplicate features are not allowed, so there can be no overlap between\n selected indices. If the max value of the indices is greater than the size of\n the input vector, it throws an IllegalArgumentException.\n \"\"\"\n\n def __init__(self, java_model=None):\n super(VectorSlicer, self).__init__(java_model)\n\n @classmethod\n def _java_transformer_package_name(cls) -> str:\n return \"vectorslicer\"\n\n @classmethod\n def _java_transformer_class_name(cls) -> str:\n return \"VectorSlicer\"\n", "metadata": {"chunk_id": "doc_72_chunk_3", "original_index": 3, "pid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_3"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. 
The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "metadata": {"chunk_id": "doc_73_chunk_0", "original_index": 0, "pid": "883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd_0"}, "type": "Document"} +{"page_content": "package org.apache.flink.iteration.operator.allround;\n\nimport org.apache.flink.annotation.Internal;\n\nimport java.util.function.Supplier;\n\n/**\n * Operators or UDF implements this method would be provided with an supplier that provides the\n * current rounds of the current element.\n */\n@Internal\npublic interface EpochAware {\n\n void setEpochSupplier(Supplier epochSupplier);\n}\n", "metadata": {"chunk_id": "doc_73_chunk_1", "original_index": 1, "pid": "883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd_1"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. See the License for the\n * specific language governing permissions and limitations\n * under the License.\n */\n\n", "metadata": {"chunk_id": "doc_74_chunk_0", "original_index": 0, "pid": "8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5_0"}, "type": "Document"} +{"page_content": "package org.apache.flink.ml.linalg;\n\nimport org.apache.flink.api.common.typeinfo.TypeInfo;\nimport org.apache.flink.ml.linalg.typeinfo.VectorWithNormTypeInfoFactory;\n\n/** A vector with its norm. */\n@TypeInfo(VectorWithNormTypeInfoFactory.class)\npublic class VectorWithNorm {\n public final Vector vector;\n\n public final double l2Norm;\n\n public VectorWithNorm(Vector vector) {\n this(vector, BLAS.norm2(vector));\n }\n\n public VectorWithNorm(Vector vector, double l2Norm) {\n this.vector = vector;\n this.l2Norm = l2Norm;\n }\n}\n", "metadata": {"chunk_id": "doc_74_chunk_1", "original_index": 1, "pid": "8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5_1"}, "type": "Document"} +{"page_content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\nfrom typing import List\n\n", "metadata": {"chunk_id": "doc_75_chunk_0", "original_index": 0, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_0"}, "type": "Document"} +{"page_content": "from pyflink.common import Types\nfrom pyflink.ml.tests.test_utils import PyFlinkMLTestCase, update_existing_params\n\nfrom pyflink.ml.linalg import DenseVectorTypeInfo, Vectors\n\nfrom pyflink.ml.feature.univariatefeatureselector import UnivariateFeatureSelector, \\\n UnivariateFeatureSelectorModel\nfrom pyflink.table import Table\n\n\nclass UnivariateFeatureSelectorTest(PyFlinkMLTestCase):\n\n def setUp(self):\n super(UnivariateFeatureSelectorTest, self).setUp()\n self.input_table = self.t_env.from_data_stream(\n self.env.from_collection([\n (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,\n 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)),\n (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,\n 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)),\n", "metadata": {"chunk_id": "doc_75_chunk_1", "original_index": 1, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_1"}, "type": "Document"} +{"page_content": " (1, Vectors.dense(3.85692159e-01, -9.04639637e-01, 5.09782604e-02,\n 8.40043971e-01, 7.45977857e-01, 8.78402288e-01)),\n (1, Vectors.dense(1.36264353e+00, 2.62454094e-01, 7.96306202e-01,\n 6.14948000e-01, 7.44948187e-01, 9.74034830e-01)),\n (1, Vectors.dense(9.65874070e-01, 2.52773665e+00, -2.19380094e+00,\n 2.33408080e-01, 1.86340919e-01, 8.23390433e-01)),\n", "metadata": {"chunk_id": "doc_75_chunk_2", "original_index": 2, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_2"}, "type": "Document"} +{"page_content": " (2, Vectors.dense(1.12324305e+01, -2.77121515e-01, 1.12740513e-01,\n 2.35184013e-01, 3.46668895e-01, 9.38500782e-02)),\n (2, Vectors.dense(1.06195839e+01, -1.82891238e+00, 2.25085601e-01,\n 9.09979851e-01, 6.80257535e-02, 8.24017480e-01)),\n (2, Vectors.dense(1.12806837e+01, 1.30686889e+00, 9.32839108e-02,\n 3.49784755e-01, 1.71322408e-02, 7.48465194e-02)),\n (2, Vectors.dense(9.98689462e+00, 9.50808938e-01, -2.90786359e-01,\n 2.31253009e-01, 7.46270968e-01, 1.60308169e-01)),\n", "metadata": {"chunk_id": "doc_75_chunk_3", "original_index": 3, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_3"}, "type": "Document"} +{"page_content": " (2, Vectors.dense(1.08428551e+01, -1.02749936e+00, 1.73951508e-01,\n 8.92482744e-02, 1.42651730e-01, 7.66751625e-01)),\n (3, Vectors.dense(-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,\n 8.22809049e-01, 3.26739456e-01, 7.88268404e-01)),\n (3, Vectors.dense(-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,\n 7.33931213e-01, 1.42554396e-01, 7.11225605e-01)),\n (3, Vectors.dense(-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,\n 2.51532056e-01, 2.82729807e-01, 7.16245686e-01)),\n", "metadata": {"chunk_id": "doc_75_chunk_4", "original_index": 4, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_4"}, "type": "Document"} 
+{"page_content": " (3, Vectors.dense(-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,\n 5.93898886e-01, 5.68425656e-01, 8.49762330e-01)),\n (3, Vectors.dense(7.63485129e-01, 1.02605138e+01, 1.32617719e+00,\n 5.49682879e-01, 8.59931442e-01, 4.88677978e-02)),\n (4, Vectors.dense(9.34900015e-01, 4.11379043e-01, 8.65010205e+00,\n 9.23509168e-01, 1.16995043e-01, 5.91894106e-03)),\n (4, Vectors.dense(4.73734933e-01, -1.48321181e+00, 9.73349621e+00,\n 4.09421563e-01, 5.09375719e-01, 5.93157850e-01)),\n (4, Vectors.dense(3.41470679e-01, -6.88972582e-01, 9.60347938e+00,\n 3.62654055e-01, 2.43437468e-01, 7.13052838e-01)),\n", "metadata": {"chunk_id": "doc_75_chunk_5", "original_index": 5, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_5"}, "type": "Document"} +{"page_content": " (4, Vectors.dense(-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,\n 8.24123861e-01, 5.84074506e-01, 6.54461558e-01)),\n (4, Vectors.dense(-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,\n 5.16336729e-01, 9.99776159e-01, 3.15769738e-01)),\n ],\n type_info=Types.ROW_NAMED(\n ['label', 'features'],\n [Types.INT(), DenseVectorTypeInfo()])\n ))\n\n", "metadata": {"chunk_id": "doc_75_chunk_6", "original_index": 6, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_6"}, "type": "Document"} +{"page_content": " def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n", "metadata": {"chunk_id": "doc_75_chunk_7", "original_index": 7, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_7"}, "type": "Document"} +{"page_content": " univariate_feature_selector\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\\\n .set_selection_mode('fpr')\\\n .set_selection_threshold(0.01)\n self.assertEqual('test_features', univariate_feature_selector.features_col)\n self.assertEqual('test_label', univariate_feature_selector.label_col)\n self.assertEqual('test_output', univariate_feature_selector.output_col)\n self.assertEqual('continuous', univariate_feature_selector.feature_type)\n self.assertEqual('categorical', univariate_feature_selector.label_type)\n self.assertEqual('fpr', univariate_feature_selector.selection_mode)\n self.assertEqual(0.01, univariate_feature_selector.selection_threshold)\n\n", "metadata": {"chunk_id": "doc_75_chunk_8", "original_index": 8, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_8"}, "type": "Document"} +{"page_content": " def test_output_schema(self):\n selector = UnivariateFeatureSelector()\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n 
.set_label_type('categorical')\n temp_table = self.input_table.alias('test_label', 'test_features')\n model = selector.fit(temp_table)\n output = model.transform(temp_table)[0]\n self.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n\n", "metadata": {"chunk_id": "doc_75_chunk_9", "original_index": 9, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_9"}, "type": "Document"} +{"page_content": " def test_fit_and_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n output = model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_get_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n model_data = model.get_model_data()[0]\n self.assertEqual(['indices'], model_data.get_schema().get_field_names())\n\n", "metadata": {"chunk_id": "doc_75_chunk_10", "original_index": 10, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_10"}, "type": "Document"} +{"page_content": " model_rows = [result for result in\n self.t_env.to_data_stream(model_data).execute_and_collect()]\n self.assertEqual(1, len(model_rows))\n self.assertListEqual([0, 2, 1], model_rows[0][0])\n\n def test_set_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model_a = selector.fit(self.input_table)\n model_data = model_a.get_model_data()[0]\n\n model_b = UnivariateFeatureSelectorModel() \\\n .set_model_data(model_data)\n update_existing_params(model_b, model_a)\n\n output = model_b.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n", "metadata": {"chunk_id": "doc_75_chunk_11", "original_index": 11, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_11"}, "type": "Document"} +{"page_content": " def test_save_load_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n reloaded_selector = self.save_and_reload(selector)\n model = reloaded_selector.fit(self.input_table)\n reloaded_model = self.save_and_reload(model)\n output = reloaded_model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n", "metadata": {"chunk_id": "doc_75_chunk_12", "original_index": 12, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_12"}, "type": "Document"} +{"page_content": " def verify_output_result(\n self, output: Table,\n field_names: List[str],\n feature_col: str,\n output_col: str,\n indices: List[int]):\n collected_results = [result for result in\n self.t_env.to_data_stream(output).execute_and_collect()]\n for item in collected_results:\n item.set_field_names(field_names)\n self.assertEqual(len(indices), item[output_col].size())\n for i in range(0, len(indices)):\n 
self.assertEqual(item[feature_col].get(indices[i]),\n item[output_col].get(i))\n", "metadata": {"chunk_id": "doc_75_chunk_13", "original_index": 13, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_13"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.regression;\n\n", "metadata": {"chunk_id": "doc_76_chunk_0", "original_index": 0, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_0"}, "type": "Document"} +{"page_content": "import org.apache.flink.api.common.typeinfo.TypeInformation;\nimport org.apache.flink.api.common.typeinfo.Types;\nimport org.apache.flink.api.java.typeutils.RowTypeInfo;\nimport org.apache.flink.ml.linalg.SparseVector;\nimport org.apache.flink.ml.linalg.Vectors;\nimport org.apache.flink.ml.linalg.typeinfo.DenseVectorTypeInfo;\nimport org.apache.flink.ml.regression.linearregression.LinearRegression;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModel;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModelData;\nimport org.apache.flink.ml.util.ParamUtils;\nimport org.apache.flink.ml.util.TestUtils;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.test.util.AbstractTestBase;\nimport org.apache.flink.types.Row;\n\n", "metadata": {"chunk_id": "doc_76_chunk_1", "original_index": 1, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_1"}, "type": "Document"} +{"page_content": "import org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.lang3.RandomUtils;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\n\n/** Tests {@link LinearRegression} and {@link LinearRegressionModel}. 
*/\npublic class LinearRegressionTest extends AbstractTestBase {\n\n @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();\n\n private StreamExecutionEnvironment env;\n\n private StreamTableEnvironment tEnv;\n\n", "metadata": {"chunk_id": "doc_76_chunk_2", "original_index": 2, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_2"}, "type": "Document"}
+{"page_content": " private static final List<Row> trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(2, 4), 10.0, 1.0),\n Row.of(Vectors.dense(2, 2), 6.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(1, 2), 5.0, 1.0),\n Row.of(Vectors.dense(5, 3), 11.0, 1.0));\n\n private static final double[] expectedCoefficient = new double[] {1.141, 1.829};\n\n private static final double TOLERANCE = 1e-7;\n\n private static final double PREDICTION_TOLERANCE = 0.1;\n\n private static final double COEFFICIENT_TOLERANCE = 0.1;\n\n private Table trainDataTable;\n\n", "metadata": {"chunk_id": "doc_76_chunk_3", "original_index": 3, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_3"}, "type": "Document"}
+{"page_content": " @Before\n public void before() {\n env = TestUtils.getExecutionEnvironment();\n tEnv = StreamTableEnvironment.create(env);\n Collections.shuffle(trainData);\n trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation<?>[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n }\n\n", "metadata": {"chunk_id": "doc_76_chunk_4", "original_index": 4, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_4"}, "type": "Document"}
+{"page_content": " @SuppressWarnings(\"unchecked\")\n private void verifyPredictionResult(Table output, String labelCol, String predictionCol)\n throws Exception {\n List<Row> predResult = IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect());\n for (Row predictionRow : predResult) {\n double label = ((Number) predictionRow.getField(labelCol)).doubleValue();\n double prediction = (double) predictionRow.getField(predictionCol);\n assertTrue(Math.abs(prediction - label) / label < PREDICTION_TOLERANCE);\n }\n }\n\n", "metadata": {"chunk_id": "doc_76_chunk_5", "original_index": 5, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_5"}, "type": "Document"}
+{"page_content": " @Test\n public void testParam() {\n LinearRegression linearRegression = new LinearRegression();\n assertEquals(\"features\", linearRegression.getFeaturesCol());\n assertEquals(\"label\", linearRegression.getLabelCol());\n assertNull(linearRegression.getWeightCol());\n assertEquals(20, linearRegression.getMaxIter());\n assertEquals(1e-6, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.1, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(32, linearRegression.getGlobalBatchSize());\n assertEquals(0, linearRegression.getReg(), TOLERANCE);\n assertEquals(0, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"prediction\", linearRegression.getPredictionCol());\n\n", "metadata": {"chunk_id": "doc_76_chunk_6", "original_index": 6, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_6"}, "type": "Document"}
+{"page_content": " linearRegression\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n 
.setWeightCol(\"test_weight\")\n .setMaxIter(1000)\n .setTol(0.001)\n .setLearningRate(0.5)\n .setGlobalBatchSize(1000)\n .setReg(0.1)\n .setElasticNet(0.5)\n .setPredictionCol(\"test_predictionCol\");\n assertEquals(\"test_features\", linearRegression.getFeaturesCol());\n assertEquals(\"test_label\", linearRegression.getLabelCol());\n assertEquals(\"test_weight\", linearRegression.getWeightCol());\n", "metadata": {"chunk_id": "doc_76_chunk_7", "original_index": 7, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_7"}, "type": "Document"} +{"page_content": " assertEquals(1000, linearRegression.getMaxIter());\n assertEquals(0.001, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.5, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(1000, linearRegression.getGlobalBatchSize());\n assertEquals(0.1, linearRegression.getReg(), TOLERANCE);\n assertEquals(0.5, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"test_predictionCol\", linearRegression.getPredictionCol());\n }\n\n", "metadata": {"chunk_id": "doc_76_chunk_8", "original_index": 8, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_8"}, "type": "Document"} +{"page_content": " @Test\n public void testOutputSchema() {\n Table tempTable = trainDataTable.as(\"test_features\", \"test_label\", \"test_weight\");\n LinearRegression linearRegression =\n new LinearRegression()\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setPredictionCol(\"test_predictionCol\");\n Table output = linearRegression.fit(trainDataTable).transform(tempTable)[0];\n assertEquals(\n Arrays.asList(\"test_features\", \"test_label\", \"test_weight\", \"test_predictionCol\"),\n output.getResolvedSchema().getColumnNames());\n }\n\n", "metadata": {"chunk_id": "doc_76_chunk_9", "original_index": 9, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_9"}, "type": "Document"} +{"page_content": " @Test\n public void testFitAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "metadata": {"chunk_id": "doc_76_chunk_10", "original_index": 10, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_10"}, "type": "Document"} +{"page_content": " @Test\n public void testInputTypeConversion() throws Exception {\n trainDataTable = TestUtils.convertDataTypesToSparseInt(tEnv, trainDataTable);\n assertArrayEquals(\n new Class[] {SparseVector.class, Integer.class, Integer.class},\n TestUtils.getColumnDataTypes(trainDataTable));\n\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "metadata": {"chunk_id": "doc_76_chunk_11", "original_index": 11, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_11"}, "type": "Document"} +{"page_content": " @Test\n public void testSaveLoadAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n linearRegression =\n TestUtils.saveAndReload(\n tEnv,\n linearRegression,\n tempFolder.newFolder().getAbsolutePath(),\n 
LinearRegression::load);\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n", "metadata": {"chunk_id": "doc_76_chunk_12", "original_index": 12, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_12"}, "type": "Document"}
+{"page_content": " model =\n TestUtils.saveAndReload(\n tEnv,\n model,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegressionModel::load);\n assertEquals(\n Collections.singletonList(\"coefficient\"),\n model.getModelData()[0].getResolvedSchema().getColumnNames());\n Table output = model.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "metadata": {"chunk_id": "doc_76_chunk_13", "original_index": 13, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_13"}, "type": "Document"}
+{"page_content": " @Test\n public void testGetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n List<LinearRegressionModelData> modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n assertNotNull(modelData);\n assertEquals(1, modelData.size());\n assertArrayEquals(\n expectedCoefficient, modelData.get(0).coefficient.values, COEFFICIENT_TOLERANCE);\n }\n\n", "metadata": {"chunk_id": "doc_76_chunk_14", "original_index": 14, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_14"}, "type": "Document"}
+{"page_content": " @Test\n public void testSetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n\n LinearRegressionModel newModel = new LinearRegressionModel();\n ParamUtils.updateExistingParams(newModel, model.getParamMap());\n newModel.setModelData(model.getModelData());\n Table output = newModel.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "metadata": {"chunk_id": "doc_76_chunk_15", "original_index": 15, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_15"}, "type": "Document"}
+{"page_content": " @Test\n public void testMoreSubtaskThanData() throws Exception {\n List<Row> trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0));\n\n Table trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation<?>[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n\n", "metadata": {"chunk_id": "doc_76_chunk_16", "original_index": 16, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_16"}, "type": "Document"}
+{"page_content": " LinearRegression linearRegression =\n new LinearRegression().setWeightCol(\"weight\").setGlobalBatchSize(128);\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testRegularization() throws Exception {\n checkRegularization(0, RandomUtils.nextDouble(0, 1), expectedCoefficient);\n checkRegularization(0.1, 0, new double[] {1.165, 1.780});\n checkRegularization(0.1, 1, new double[] {1.143, 
1.812});\n checkRegularization(0.1, 0.5, new double[] {1.154, 1.796});\n }\n\n", "metadata": {"chunk_id": "doc_76_chunk_17", "original_index": 17, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_17"}, "type": "Document"}
+{"page_content": " @SuppressWarnings(\"unchecked\")\n private void checkRegularization(double reg, double elasticNet, double[] expectedCoefficient)\n throws Exception {\n LinearRegressionModel model =\n new LinearRegression()\n .setWeightCol(\"weight\")\n .setReg(reg)\n .setElasticNet(elasticNet)\n .fit(trainDataTable);\n List<LinearRegressionModelData> modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n final double errorTol = 1e-3;\n assertArrayEquals(expectedCoefficient, modelData.get(0).coefficient.values, errorTol);\n }\n}\n", "metadata": {"chunk_id": "doc_76_chunk_18", "original_index": 18, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_18"}, "type": "Document"}
+{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "metadata": {"chunk_id": "doc_77_chunk_0", "original_index": 0, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_0"}, "type": "Document"}
+{"page_content": "package org.apache.flink.iteration.datacache.nonkeyed;\n\nimport org.apache.flink.annotation.Internal;\nimport org.apache.flink.api.common.typeutils.TypeSerializer;\nimport org.apache.flink.core.fs.Path;\nimport org.apache.flink.core.memory.DataOutputView;\nimport org.apache.flink.core.memory.DataOutputViewStreamWrapper;\nimport org.apache.flink.core.memory.MemorySegment;\nimport org.apache.flink.runtime.memory.MemoryAllocationException;\nimport org.apache.flink.table.runtime.util.MemorySegmentPool;\n\nimport javax.annotation.Nullable;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Optional;\n\n/** A class that writes cache data to memory segments. */\n@Internal\nclass MemorySegmentWriter<T> implements SegmentWriter<T> {\n\n /** The tool to serialize received records into bytes. */\n private final TypeSerializer<T> serializer;\n\n", "metadata": {"chunk_id": "doc_77_chunk_1", "original_index": 1, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_1"}, "type": "Document"}
+{"page_content": " /** The pre-allocated path to hold cached records into the file system. */\n private final Path path;\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The output stream to write serialized content to memory segments. */\n private final ManagedMemoryOutputStream outputStream;\n\n /** The wrapper view of the output stream to be used with TypeSerializer API. 
*/\n private final DataOutputView outputView;\n\n /** The number of records added so far. */\n private int count;\n\n", "metadata": {"chunk_id": "doc_77_chunk_2", "original_index": 2, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_2"}, "type": "Document"}
+{"page_content": " MemorySegmentWriter(\n TypeSerializer<T> serializer,\n Path path,\n MemorySegmentPool segmentPool,\n long expectedSize)\n throws MemoryAllocationException {\n this.serializer = serializer;\n this.path = path;\n this.segmentPool = segmentPool;\n this.outputStream = new ManagedMemoryOutputStream(segmentPool, expectedSize);\n this.outputView = new DataOutputViewStreamWrapper(outputStream);\n this.count = 0;\n }\n\n @Override\n public boolean addRecord(T record) throws IOException {\n if (outputStream.getPos() >= DataCacheWriter.MAX_SEGMENT_SIZE) {\n return false;\n }\n try {\n serializer.serialize(record, outputView);\n count++;\n return true;\n } catch (RuntimeException e) {\n if (e.getCause() instanceof MemoryAllocationException) {\n return false;\n }\n throw e;\n }\n }\n\n", "metadata": {"chunk_id": "doc_77_chunk_3", "original_index": 3, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_3"}, "type": "Document"}
+{"page_content": " @Override\n public Optional<Segment> finish() throws IOException {\n if (count > 0) {\n return Optional.of(new Segment(path, count, outputStream.getSegments()));\n } else {\n segmentPool.returnAll(outputStream.getSegments());\n return Optional.empty();\n }\n }\n\n /** An output stream subclass that accepts bytes and writes them to memory segments. */\n private static class ManagedMemoryOutputStream extends OutputStream {\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The number of bytes in a memory segment. */\n private final int pageSize;\n\n /** The memory segments containing written bytes. */\n private final List<MemorySegment> segments = new ArrayList<>();\n\n", "metadata": {"chunk_id": "doc_77_chunk_4", "original_index": 4, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_4"}, "type": "Document"}
+{"page_content": " /** The index of the segment that currently accepts written bytes. */\n private int segmentIndex;\n\n /** The number of bytes in the current segment that have been written. */\n private int segmentOffset;\n\n /** The number of bytes that have been written so far. */\n private long globalOffset;\n\n /** The number of bytes that have been allocated so far. 
*/\n private long allocatedBytes;\n\n public ManagedMemoryOutputStream(MemorySegmentPool segmentPool, long expectedSize)\n throws MemoryAllocationException {\n this.segmentPool = segmentPool;\n this.pageSize = segmentPool.pageSize();\n ensureCapacity(Math.max(expectedSize, 1L));\n }\n\n", "metadata": {"chunk_id": "doc_77_chunk_5", "original_index": 5, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_5"}, "type": "Document"}
+{"page_content": " public long getPos() {\n return globalOffset;\n }\n\n public List<MemorySegment> getSegments() {\n return segments;\n }\n\n @Override\n public void write(int b) throws IOException {\n write(new byte[] {(byte) b}, 0, 1);\n }\n\n @Override\n public void write(@Nullable byte[] b, int off, int len) throws IOException {\n try {\n ensureCapacity(globalOffset + len);\n } catch (MemoryAllocationException e) {\n throw new RuntimeException(e);\n }\n\n", "metadata": {"chunk_id": "doc_77_chunk_6", "original_index": 6, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_6"}, "type": "Document"}
+{"page_content": " while (len > 0) {\n int currentLen = Math.min(len, pageSize - segmentOffset);\n segments.get(segmentIndex).put(segmentOffset, b, off, currentLen);\n segmentOffset += currentLen;\n globalOffset += currentLen;\n if (segmentOffset >= pageSize) {\n segmentIndex++;\n segmentOffset = 0;\n }\n off += currentLen;\n len -= currentLen;\n }\n }\n\n private void ensureCapacity(long capacity) throws MemoryAllocationException {\n if (allocatedBytes >= capacity) {\n return;\n }\n\n int required =\n (int) (capacity % pageSize == 0 ? capacity / pageSize : capacity / pageSize + 1)\n - segments.size();\n\n", "metadata": {"chunk_id": "doc_77_chunk_7", "original_index": 7, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_7"}, "type": "Document"}
+{"page_content": " List<MemorySegment> allocatedSegments = new ArrayList<>();\n for (int i = 0; i < required; i++) {\n MemorySegment memorySegment = segmentPool.nextSegment();\n if (memorySegment == null) {\n segmentPool.returnAll(allocatedSegments);\n throw new MemoryAllocationException();\n }\n allocatedSegments.add(memorySegment);\n }\n\n segments.addAll(allocatedSegments);\n allocatedBytes += (long) allocatedSegments.size() * pageSize;\n }\n }\n}\n", "metadata": {"chunk_id": "doc_77_chunk_8", "original_index": 8, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_8"}, "type": "Document"}
+{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "metadata": {"chunk_id": "doc_78_chunk_0", "original_index": 0, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_0"}, "type": "Document"}
+{"page_content": "package org.apache.flink.iteration.operator.coordinator;\n\nimport org.apache.flink.iteration.IterationID;\nimport org.apache.flink.iteration.operator.event.GloballyAlignedEvent;\nimport org.apache.flink.iteration.operator.event.SubtaskAlignedEvent;\nimport org.apache.flink.runtime.jobgraph.OperatorID;\nimport org.apache.flink.runtime.operators.coordination.EventReceivingTasks;\nimport org.apache.flink.runtime.operators.coordination.MockOperatorCoordinatorContext;\nimport org.apache.flink.runtime.operators.coordination.OperatorEvent;\nimport org.apache.flink.util.TestLogger;\n\nimport org.junit.Test;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.function.BiFunction;\n\nimport static org.junit.Assert.assertEquals;\n\n/** Tests the behavior of {@link HeadOperatorCoordinator}. */\npublic class HeadOperatorCoordinatorTest extends TestLogger {\n\n", "metadata": {"chunk_id": "doc_78_chunk_1", "original_index": 1, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_1"}, "type": "Document"}
+{"page_content": " @Test(timeout = 60000L)\n public void testForwardEvents() throws Exception {\n IterationID iterationId = new IterationID();\n List<OperatorID> operatorIds = Arrays.asList(new OperatorID(), new OperatorID());\n List<Integer> parallelisms = Arrays.asList(2, 3);\n List<EventReceivingTasks> receivingTasks =\n Arrays.asList(\n EventReceivingTasks.createForRunningTasks(),\n EventReceivingTasks.createForRunningTasks());\n List<HeadOperatorCoordinator> coordinators = new ArrayList<>();\n\n", "metadata": {"chunk_id": "doc_78_chunk_2", "original_index": 2, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_2"}, "type": "Document"}
+{"page_content": " int totalParallelism = parallelisms.stream().mapToInt(i -> i).sum();\n\n for (int i = 0; i < operatorIds.size(); ++i) {\n HeadOperatorCoordinator coordinator =\n createCoordinator(iterationId, parallelisms.get(i), totalParallelism);\n setAllSubtasksReady(coordinator, receivingTasks.get(i), parallelisms.get(i));\n coordinators.add(coordinator);\n }\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(2, j, false)));\n checkSentEvent(1, new GloballyAlignedEvent(2, false), receivingTasks, parallelisms);\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(3, 0, false)));\n checkSentEvent(2, new GloballyAlignedEvent(3, true), receivingTasks, parallelisms);\n }\n\n", "metadata": {"chunk_id": "doc_78_chunk_3", "original_index": 3, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_3"}, "type": "Document"}
+{"page_content": " private HeadOperatorCoordinator createCoordinator(\n IterationID iterationId, int parallelism, int totalHeadParallelism) {\n MockOperatorCoordinatorContext context =\n new MockOperatorCoordinatorContext(new OperatorID(), parallelism);\n 
return (HeadOperatorCoordinator)\n new HeadOperatorCoordinator.HeadOperatorCoordinatorProvider(\n new OperatorID(), iterationId, totalHeadParallelism)\n .create(context);\n }\n\n private void setAllSubtasksReady(\n HeadOperatorCoordinator coordinator,\n EventReceivingTasks receivingTasks,\n int parallelism) {\n for (int i = 0; i < parallelism; i++) {\n coordinator.executionAttemptReady(i, 0, receivingTasks.createGatewayForSubtask(i, 0));\n }\n }\n\n", "metadata": {"chunk_id": "doc_78_chunk_4", "original_index": 4, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_4"}, "type": "Document"}
+{"page_content": " private void receiveEvent(\n List<HeadOperatorCoordinator> coordinators,\n List<Integer> parallelisms,\n BiFunction<Integer, Integer, List<OperatorEvent>> eventFactory)\n throws Exception {\n for (int i = 0; i < coordinators.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n List<OperatorEvent> events = eventFactory.apply(i, j);\n for (OperatorEvent event : events) {\n coordinators.get(i).handleEventFromOperator(j, 0, event);\n }\n }\n }\n }\n\n", "metadata": {"chunk_id": "doc_78_chunk_5", "original_index": 5, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_5"}, "type": "Document"}
+{"page_content": " private void checkSentEvent(\n int expectedNumEvents,\n GloballyAlignedEvent expectedLastEvent,\n List<EventReceivingTasks> receivingTasks,\n List<Integer> parallelisms)\n throws InterruptedException {\n for (int i = 0; i < parallelisms.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n while (true) {\n List<OperatorEvent> events = receivingTasks.get(i).getSentEventsForSubtask(j);\n if (events.size() < expectedNumEvents) {\n Thread.sleep(50);\n continue;\n }\n\n assertEquals(expectedLastEvent, events.get(events.size() - 1));\n break;\n }\n }\n }\n }\n}\n", "metadata": {"chunk_id": "doc_78_chunk_6", "original_index": 6, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_6"}, "type": "Document"}
+{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "metadata": {"chunk_id": "doc_79_chunk_0", "original_index": 0, "pid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_0"}, "type": "Document"}
+{"page_content": "package org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. 
*/\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n", "metadata": {"chunk_id": "doc_79_chunk_1", "original_index": 1, "pid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_1"}, "type": "Document"}
+{"page_content": " // Generates input data.\n DataStream<Row> inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n", "metadata": {"chunk_id": "doc_79_chunk_2", "original_index": 2, "pid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_2"}, "type": "Document"}
+{"page_content": " // Creates a StopWordsRemover object and initializes its parameters.\n StopWordsRemover remover =\n new StopWordsRemover().setInputCols(\"input\").setOutputCols(\"output\");\n\n // Uses the StopWordsRemover object for feature transformations.\n Table outputTable = remover.transform(inputTable)[0];\n\n // Extracts and displays the results.\n for (CloseableIterator<Row> it = outputTable.execute().collect(); it.hasNext(); ) {\n Row row = it.next();\n\n String[] inputValues = row.getFieldAs(\"input\");\n String[] outputValues = row.getFieldAs(\"output\");\n\n System.out.printf(\n \"Input Values: %s\\tOutput Values: %s\\n\",\n Arrays.toString(inputValues), Arrays.toString(outputValues));\n }\n }\n}\n", "metadata": {"chunk_id": "doc_79_chunk_3", "original_index": 3, "pid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_3"}, "type": "Document"}
+{"page_content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n", "metadata": {"chunk_id": "doc_80_chunk_0", "original_index": 0, "pid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_0"}, "type": "Document"} +{"page_content": "# Simple program that creates an IndexToStringModel instance and uses it\n# for feature engineering.\n\nfrom pyflink.common import Types\nfrom pyflink.datastream import StreamExecutionEnvironment\nfrom pyflink.ml.feature.stringindexer import IndexToStringModel\nfrom pyflink.table import StreamTableEnvironment\n\n# create a new StreamExecutionEnvironment\nenv = StreamExecutionEnvironment.get_execution_environment()\n\n# create a StreamTableEnvironment\nt_env = StreamTableEnvironment.create(env)\n\n# generate input data\npredict_table = t_env.from_data_stream(\n env.from_collection([\n (0, 3),\n (1, 2),\n ],\n type_info=Types.ROW_NAMED(\n ['input_col1', 'input_col2'],\n [Types.INT(), Types.INT()])\n ))\n\n", "metadata": {"chunk_id": "doc_80_chunk_1", "original_index": 1, "pid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_1"}, "type": "Document"} +{"page_content": "# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n", "metadata": {"chunk_id": "doc_80_chunk_2", "original_index": 2, "pid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_2"}, "type": "Document"} +{"page_content": "# extract and display the results\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = result[field_names.index(model.get_input_cols()[i])]\n output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n", "metadata": {"chunk_id": "doc_80_chunk_3", "original_index": 3, "pid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_3"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "metadata": {"chunk_id": "doc_81_chunk_0", "original_index": 0, "pid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_0"}, "type": "Document"} +{"page_content": "#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\nusing namespace LOG4CXX_NS::pattern;\nusing namespace LOG4CXX_NS::spi;\nusing namespace LOG4CXX_NS::helpers;\n\nIMPLEMENT_LOG4CXX_OBJECT(ColorEndPatternConverter)\n\nColorEndPatternConverter::ColorEndPatternConverter() :\n\tLoggingEventPatternConverter(LOG4CXX_STR(\"Color End\"),\n\t\tLOG4CXX_STR(\"colorEnd\"))\n{\n}\n\n", "metadata": {"chunk_id": "doc_81_chunk_1", "original_index": 1, "pid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_1"}, "type": "Document"} +{"page_content": "PatternConverterPtr ColorEndPatternConverter::newInstance(\n\tconst std::vector& /* options */)\n{\n\tstatic WideLife instance = std::make_shared();\n\treturn instance;\n}\n\nvoid ColorEndPatternConverter::format(\n\tconst LoggingEventPtr& event,\n\tLogString& toAppendTo,\n\tPool& p) const\n{\n\n\t// Reset all colors on the output(code 0)\n\t// Code 39 would be to reset colors only\n\ttoAppendTo.append(LOG4CXX_STR(\"\\x1B[0m\"));\n}\n", "metadata": {"chunk_id": "doc_81_chunk_2", "original_index": 2, "pid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_2"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "metadata": {"chunk_id": "doc_82_chunk_0", "original_index": 0, "pid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_0"}, "type": "Document"} +{"page_content": "#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "metadata": {"chunk_id": "doc_82_chunk_1", "original_index": 1, "pid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_1"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "metadata": {"chunk_id": "doc_83_chunk_0", "original_index": 0, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_0"}, "type": "Document"} +{"page_content": "#define LOG4CXX_TEST 1\n#include \n\n#include \n#include \"../appenderskeletontestcase.h\"\n#include \n#include \n#include \n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace net\n{\n\nclass MockTriggeringEventEvaluator :\n\tpublic virtual spi::TriggeringEventEvaluator\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(MockTriggeringEventEvaluator)\n\t\tLOG4CXX_CAST_ENTRY(spi::TriggeringEventEvaluator)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n", "metadata": {"chunk_id": "doc_83_chunk_1", "original_index": 1, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_1"}, "type": "Document"} +{"page_content": "\t\tMockTriggeringEventEvaluator()\n\t\t{\n\t\t}\n\n\t\tbool isTriggeringEvent(const spi::LoggingEventPtr& event) override\n\t\t{\n\t\t\treturn true;\n\t\t}\n\tprivate:\n\t\tMockTriggeringEventEvaluator(const MockTriggeringEventEvaluator&);\n\t\tMockTriggeringEventEvaluator& operator=(const MockTriggeringEventEvaluator&);\n};\n}\n}\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\nusing namespace log4cxx::net;\n\nIMPLEMENT_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\n", "metadata": {"chunk_id": "doc_83_chunk_2", "original_index": 2, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_2"}, "type": "Document"} +{"page_content": "\n/**\n Unit tests of log4cxx::SocketAppender\n */\nclass SMTPAppenderTestCase : public AppenderSkeletonTestCase\n{\n\t\tLOGUNIT_TEST_SUITE(SMTPAppenderTestCase);\n\t\t//\n\t\t// tests inherited from AppenderSkeletonTestCase\n\t\t//\n\t\tLOGUNIT_TEST(testDefaultThreshold);\n\t\tLOGUNIT_TEST(testSetOptionThreshold);\n\t\tLOGUNIT_TEST(testTrigger);\n\t\tLOGUNIT_TEST(testInvalid);\n//#define LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n#ifdef LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n\t\t// This test requires the following environment variables:\n\t\t// LOG4CXX_TEST_EMAIL_RECIPIENT - where the email is sent\n\t\t// LOG4CXX_TEST_SMTP_HOST_NAME - the email server\n\t\tLOGUNIT_TEST(testValid);\n#endif\n\t\tLOGUNIT_TEST_SUITE_END();\n\n", "metadata": {"chunk_id": "doc_83_chunk_3", "original_index": 3, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_3"}, "type": "Document"} +{"page_content": "\n\tpublic:\n\n\t\tAppenderSkeleton* createAppenderSkeleton() const\n\t\t{\n\t\t\treturn new log4cxx::net::SMTPAppender();\n\t\t}\n\n\t\tvoid setUp()\n\t\t{\n\t\t}\n\n\t\tvoid tearDown()\n\t\t{\n\t\t\tLogManager::resetConfiguration();\n\t\t}\n\n\t\t/**\n\t\t * Tests that triggeringPolicy element will set evaluator.\n\t\t 
*/\n\t\tvoid testTrigger()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppender1.xml\");\n\t\t\tauto appender = log4cxx::cast(Logger::getRootLogger()->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto evaluator = appender->getEvaluator();\n\t\t\tLOGUNIT_ASSERT(evaluator);\n\t\t\tLOGUNIT_ASSERT_EQUAL(true, evaluator->instanceof(MockTriggeringEventEvaluator::getStaticClass()));\n\t\t}\n\n", "metadata": {"chunk_id": "doc_83_chunk_4", "original_index": 4, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_4"}, "type": "Document"} +{"page_content": "\t\tvoid testInvalid()\n\t\t{\n\t\t\tauto appender = std::make_shared();\n\t\t\tappender->setSMTPHost(LOG4CXX_STR(\"smtp.invalid\"));\n\t\t\tappender->setTo(LOG4CXX_STR(\"you@example.invalid\"));\n\t\t\tappender->setFrom(LOG4CXX_STR(\"me@example.invalid\"));\n\t\t\tappender->setLayout(std::make_shared());\n\t\t\tPool p;\n\t\t\tappender->activateOptions(p);\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\troot->addAppender(appender);\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\");\n\t\t\tLOG4CXX_ERROR(root, \"Sending Message\"); // The DefaultEvaluator should trigger e-mail generation\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(eh->errorReported());\n\t\t}\n\n", "metadata": {"chunk_id": "doc_83_chunk_5", "original_index": 5, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_5"}, "type": "Document"} +{"page_content": "\n\t\tvoid testValid()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppenderValid.xml\");\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\\n\\nThis paragraph should be preceeded by a blank line.\");\n\n\t\t\tauto appender = log4cxx::cast(root->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(!eh->errorReported());\n\t\t}\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(SMTPAppenderTestCase);\n\n", "metadata": {"chunk_id": "doc_83_chunk_6", "original_index": 6, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_6"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n", "metadata": {"chunk_id": "doc_84_chunk_0", "original_index": 0, "pid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_0"}, "type": "Document"} +{"page_content": " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\n", "metadata": {"chunk_id": "doc_84_chunk_1", "original_index": 1, "pid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_1"}, "type": "Document"} +{"page_content": "void BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "metadata": {"chunk_id": "doc_84_chunk_2", "original_index": 2, "pid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_2"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "metadata": {"chunk_id": "doc_85_chunk_0", "original_index": 0, "pid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a_0"}, "type": "Document"} +{"page_content": "#include \n/* Prevent error C2491: 'std::numpunct<_Elem>::id': definition of dllimport static data member not allowed */\n#if defined(_MSC_VER) && (LOG4CXX_UNICHAR_API || LOG4CXX_LOGCHAR_IS_UNICHAR)\n#define __FORCE_INSTANCE\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\ntypedef std::basic_stringstream LogStream;\n\nLogString LOG4CXX_NS::hexdump(const void* bytes, uint32_t len, HexdumpFlags flags){\n\tLogString ret;\n\tconst uint8_t* bytes_u8 = static_cast(bytes);\n\tLogStream sstream;\n#if LOG4CXX_LOGCHAR_IS_WCHAR\n\tconst wchar_t fill_char = L'0';\n\tconst wchar_t space_fill_char = L' ';\n#else\n\tconst logchar fill_char = '0';\n\tconst logchar space_fill_char = ' ';\n#endif\n\n", "metadata": {"chunk_id": "doc_85_chunk_1", "original_index": 1, "pid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a_1"}, "type": "Document"} +{"page_content": "\tif(flags & HexdumpFlags::AddStartingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\tfor(uint32_t offset = 0; offset < len; offset += 16){\n\t\tif(offset != 0){\n\t\t\tsstream << LOG4CXX_EOL;\n\t\t}\n\n\t\t// Print out the offset\n\t\tsstream << std::hex << std::setw(8) << std::setfill(fill_char) << offset << std::resetiosflags(std::ios_base::fmtflags(0));\n\n\t\tsstream << std::setw(0) << LOG4CXX_STR(\" \");\n\n\t\t// Print out the first 8 bytes\n\t\tfor(int byte = 0; byte < 8; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\tif(byte != 8){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tsstream << std::setfill(space_fill_char);\n\t\t\tif(byte != 8){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n", "metadata": {"chunk_id": "doc_85_chunk_2", "original_index": 2, "pid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a_2"}, "type": "Document"} +{"page_content": "\t\tsstream << LOG4CXX_STR(\" \");\n\n\t\t// Print out the last 8 bytes\n\t\tfor(int byte = 8; byte < 16; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\tif(byte != 15){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tif(byte != 15){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n\t\t// Print out the ASCII text\n\t\tsstream << LOG4CXX_STR(\" |\");\n\t\tfor(int byte = 0; byte < 16; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tif(std::isprint(bytes_u8[offset + byte])){\n\t\t\t\tlogchar to_append = bytes_u8[offset + byte];\n\t\t\t\tsstream << 
to_append;\n\t\t\t}else{\n\t\t\t\tsstream << LOG4CXX_STR(\".\");\n\t\t\t}\n\t\t}\n\t\tsstream << LOG4CXX_STR(\"|\");\n\t}\n\n\tif(flags & HexdumpFlags::AddEndingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\treturn sstream.str();\n}\n", "metadata": {"chunk_id": "doc_85_chunk_3", "original_index": 3, "pid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a_3"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\n", "metadata": {"chunk_id": "doc_86_chunk_0", "original_index": 0, "pid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_0"}, "type": "Document"} +{"page_content": "using namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", "metadata": {"chunk_id": "doc_86_chunk_1", "original_index": 1, "pid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_1"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \n#include \n#include \n#include \n\n", "metadata": {"chunk_id": "doc_87_chunk_0", "original_index": 0, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_0"}, "type": "Document"} +{"page_content": "#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/threadfilter.h\"\n#include \n#include \n#include \n#include \n#include \"testchar.h\"\n#include \n#include \n#include \n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(MinimumTestCase)\n{\n\tLOGUNIT_TEST_SUITE(MinimumTestCase);\n\tLOGUNIT_TEST(simple);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\troot->removeAllAppenders();\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n", "metadata": {"chunk_id": "doc_87_chunk_1", "original_index": 1, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_1"}, "type": "Document"} +{"page_content": "\tvoid simple()\n\t{\n\t\tLayoutPtr layout = LayoutPtr(new SimpleLayout());\n\t\tAppenderPtr appender = FileAppenderPtr(new FileAppender(layout, LOG4CXX_STR(\"output/simple\"), false));\n\t\troot->addAppender(appender);\n\t\tcommon();\n\n\t\tLOGUNIT_ASSERT(Compare::compare(LOG4CXX_FILE(\"output/simple\"), LOG4CXX_FILE(\"witness/simple\")));\n\t}\n\n\tstd::string createMessage(int i, Pool & pool)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = 0;\n\n\t\t// In the lines below, the logger names are chosen as an aid in\n\t\t// remembering their level values. 
In general, the logger names\n\t\t// have no bearing to level values.\n\t\tLoggerPtr ERRlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"ERR\"));\n\t\tERRlogger->setLevel(Level::getError());\n\n", "metadata": {"chunk_id": "doc_87_chunk_2", "original_index": 2, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_2"}, "type": "Document"} +{"page_content": "\t\tLoggerPtr INF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF\"));\n\t\tINF->setLevel(Level::getInfo());\n\n\t\tLoggerPtr INF_ERR = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR\"));\n\t\tINF_ERR->setLevel(Level::getError());\n\n\t\tLoggerPtr DEB = Logger::getLogger(LOG4CXX_TEST_STR(\"DEB\"));\n\t\tDEB->setLevel(Level::getDebug());\n\n\t\t// Note: categories with undefined level\n\t\tLoggerPtr INF_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.UNDEF\"));\n\t\tLoggerPtr INF_ERR_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR.UNDEF\"));\n\t\tLoggerPtr UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"UNDEF\"));\n\n", "metadata": {"chunk_id": "doc_87_chunk_3", "original_index": 3, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_3"}, "type": "Document"} +{"page_content": "\t\tstd::string msg(\"Message \");\n\n\t\tPool pool;\n\n\t\t// These should all log.----------------------------\n\t\tLOG4CXX_FATAL(ERRlogger, createMessage(i, pool));\n\t\ti++; //0\n\t\tLOG4CXX_ERROR(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF, createMessage(i, pool));\n\t\ti++; // 2\n\t\tLOG4CXX_ERROR(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_UNDEF, createMessage(i, pool));\n\t\ti++; //6\n\t\tLOG4CXX_ERROR(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR, createMessage(i, pool));\n\t\ti++; // 10\n\t\tLOG4CXX_ERROR(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_ERROR(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n", "metadata": {"chunk_id": "doc_87_chunk_4", "original_index": 4, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_4"}, "type": "Document"} +{"page_content": "\t\tLOG4CXX_FATAL(DEB, createMessage(i, pool));\n\t\ti++; //14\n\t\tLOG4CXX_ERROR(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(DEB, createMessage(i, pool));\n\t\ti++;\n\n\t\t// defaultLevel=DEBUG\n\t\tLOG4CXX_FATAL(UNDEF, createMessage(i, pool));\n\t\ti++; // 19\n\t\tLOG4CXX_ERROR(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\t// The following should not log\n\t\tLOG4CXX_WARN(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_DEBUG(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n", "metadata": {"chunk_id": "doc_87_chunk_5", "original_index": 5, "pid": 
"e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_5"}, "type": "Document"} +{"page_content": "\t\tLOG4CXX_WARN(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\tLOG4CXX_INFO(INF, LOG4CXX_TEST_STR(\"Messages should bear numbers 0 through 23.\"));\n\t}\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\nprivate:\n\tstatic const File FILTERED;\n};\n\n\nconst File MinimumTestCase::FILTERED(\"output/minimumfiltered\");\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(MinimumTestCase);\n", "metadata": {"chunk_id": "doc_87_chunk_6", "original_index": 6, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_6"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "metadata": {"chunk_id": "doc_88_chunk_0", "original_index": 0, "pid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_0"}, "type": "Document"} +{"page_content": "#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "metadata": {"chunk_id": "doc_88_chunk_1", "original_index": 1, "pid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_1"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \"testchar.h\"\n", "metadata": {"chunk_id": "doc_89_chunk_0", "original_index": 0, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_0"}, "type": "Document"} +{"page_content": "#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/iso8601filter.h\"\n#include \"util/absolutetimefilter.h\"\n#include \"util/relativetimefilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/threadfilter.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/filenamefilter.h\"\n#include \"vectorappender.h\"\n#include \n#include \n#include \n#include \n#include \n#include \n\n#define REGEX_STR(x) x\n#define PAT0 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* - Message [0-9]\\\\{1,2\\\\}\")\n#define PAT1 ISO8601_PAT REGEX_STR(\" \") PAT0\n#define PAT2 ABSOLUTE_DATE_AND_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT3 ABSOLUTE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT4 RELATIVE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT5 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* : Message [0-9]\\\\{1,2\\\\}\")\n", "metadata": {"chunk_id": "doc_89_chunk_1", "original_index": 1, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_1"}, "type": "Document"} +{"page_content": "#define PAT6 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) .*patternlayouttest.cpp\\\\([0-9]\\\\{1,4\\\\}\\\\): Message [0-9]\\\\{1,3\\\\}\")\n#define PAT11a REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ log4j.PatternLayoutTest: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT11b REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ root: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT12 REGEX_STR(\"^\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) \")\\\n\tREGEX_STR(\".*patternlayouttest.cpp([0-9]\\\\{1,4\\\\}): \")\\\n\tREGEX_STR(\"Message [0-9]\\\\{1,2\\\\}\")\n#define PAT_MDC_1 REGEX_STR(\"\")\n\n", "metadata": {"chunk_id": "doc_89_chunk_2", "original_index": 2, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_2"}, "type": "Document"} +{"page_content": "using namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(FMTTestCase)\n{\n\tLOGUNIT_TEST_SUITE(FMTTestCase);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST(test1_expanded);\n\tLOGUNIT_TEST(test10);\n//\tLOGUNIT_TEST(test_date);\n\tLOGUNIT_TEST_SUITE_END();\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\tMDC::clear();\n\t\tlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"java.org.apache.log4j.PatternLayoutTest\"));\n\t}\n\n", "metadata": {"chunk_id": "doc_89_chunk_3", "original_index": 3, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_3"}, "type": 
"Document"} +{"page_content": "\tvoid tearDown()\n\t{\n\t\tMDC::clear();\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\tvoid test1()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\tvoid test1_expanded()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1_expanded.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n", "metadata": {"chunk_id": "doc_89_chunk_4", "original_index": 4, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_4"}, "type": "Document"} +{"page_content": "\tvoid test10()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout10.properties\"));\n\t\tcommon();\n\n\t\tControlFilter filter1;\n\t\tfilter1 << PAT6;\n\t\tThreadFilter filter2;\n\t\tLineNumberFilter filter3;\n\t\tFilenameFilter filenameFilter(__FILE__, \"patternlayouttest.cpp\");\n\n\n\t\tstd::vector filters;\n\t\tfilters.push_back(&filenameFilter);\n\t\tfilters.push_back(&filter1);\n\t\tfilters.push_back(&filter2);\n\t\tfilters.push_back(&filter3);\n\n", "metadata": {"chunk_id": "doc_89_chunk_5", "original_index": 5, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_5"}, "type": "Document"} +{"page_content": "\n\t\ttry\n\t\t{\n\t\t\tTransformer::transform(TEMP, FILTERED, filters);\n\t\t}\n\t\tcatch (UnexpectedFormatException& e)\n\t\t{\n\t\t\tstd::cout << \"UnexpectedFormatException :\" << e.what() << std::endl;\n\t\t\tthrow;\n\t\t}\n\n\t\tLOGUNIT_ASSERT(Compare::compare(FILTERED, LOG4CXX_FILE(\"witness/patternLayout.10\")));\n\t}\n\n\tvoid test_date(){\n\t\tstd::tm tm = {};\n\t\tstd::stringstream ss(\"2013-04-11 08:35:34\");\n\t\tss >> std::get_time(&tm, \"%Y-%m-%d %H:%M:%S\");\n\t\tauto tp = std::chrono::system_clock::from_time_t(std::mktime(&tm));\n\t\tuint64_t micros = std::chrono::duration_cast(tp.time_since_epoch()).count();\n\n\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction([micros](){\n\t\t\treturn micros;\n\t\t});\n\n", "metadata": {"chunk_id": "doc_89_chunk_6", "original_index": 6, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_6"}, "type": "Document"} +{"page_content": "\t\tlog4cxx::spi::LoggingEventPtr logEvt = std::make_shared(LOG4CXX_STR(\"foo\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t Level::getInfo(),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t LOG4CXX_STR(\"A Message\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t log4cxx::spi::LocationInfo::getLocationUnavailable());\n\t\tFMTLayout layout(LOG4CXX_STR(\"{d:%Y-%m-%d %H:%M:%S} {message}\"));\n\t\tLogString output;\n\t\tlog4cxx::helpers::Pool pool;\n\t\tlayout.format( output, logEvt, pool);\n\n", "metadata": {"chunk_id": "doc_89_chunk_7", "original_index": 7, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_7"}, "type": "Document"} +{"page_content": "\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction(nullptr);\n\n\t\tLOGUNIT_ASSERT_EQUAL(LOG4CXX_STR(\"2013-04-11 09:35:34 A Message\"), output);\n\t}\n\n\tstd::string createMessage(Pool & pool, int i)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = -1;\n\n\t\tPool pool;\n\n\n\t\tLOG4CXX_DEBUG(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_DEBUG(root, 
createMessage(pool, i));\n\n\t\tLOG4CXX_INFO(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_INFO(root, createMessage(pool, i));\n\n\t\tLOG4CXX_WARN(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_WARN(root, createMessage(pool, i));\n\n", "metadata": {"chunk_id": "doc_89_chunk_8", "original_index": 8, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_8"}, "type": "Document"} +{"page_content": "\t\tLOG4CXX_ERROR(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_ERROR(root, createMessage(pool, i));\n\n\t\tLOG4CXX_FATAL(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_FATAL(root, createMessage(pool, i));\n\t}\n\n\tprivate:\n\t\tstatic const LogString FILTERED;\n\t\tstatic const LogString TEMP;\n\n};\n\nconst LogString FMTTestCase::TEMP(LOG4CXX_STR(\"output/fmtlayout\"));\nconst LogString FMTTestCase::FILTERED(LOG4CXX_STR(\"output/fmtlayoutfiltered\"));\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(FMTTestCase);\n", "metadata": {"chunk_id": "doc_89_chunk_9", "original_index": 9, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_9"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "metadata": {"chunk_id": "doc_90_chunk_0", "original_index": 0, "pid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_0"}, "type": "Document"} +{"page_content": "#ifndef _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n#define _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n\n#include \n\nnamespace LOG4CXX_NS\n{\n\nnamespace helpers\n{\n\n/**\n* Writes text to a character-output stream buffering\n* requests to increase efficiency.\n*/\nclass LOG4CXX_EXPORT BufferedWriter : public Writer\n{\n\tprivate:\n\t\tLOG4CXX_DECLARE_PRIVATE_MEMBER_PTR(BufferedWriterPriv, m_priv)\n\n", "metadata": {"chunk_id": "doc_90_chunk_1", "original_index": 1, "pid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_1"}, "type": "Document"} +{"page_content": "\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n", "metadata": {"chunk_id": "doc_90_chunk_2", "original_index": 2, "pid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_2"}, "type": "Document"} diff --git 
a/experiments/data/contextual-embeddings/data_base/qrels.jsonl b/experiments/data/contextual-embeddings/data_base/qrels.jsonl new file mode 100644 index 0000000..659d68e --- /dev/null +++ b/experiments/data/contextual-embeddings/data_base/qrels.jsonl @@ -0,0 +1,248 @@ +{"0": {"5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_0": 1}} +{"1": {"5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_1": 1}} +{"2": {"5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_2": 1, "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_1": 1, "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_0": 1}} +{"3": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_1": 1}} +{"4": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_1": 1}} +{"5": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_2": 1}} +{"6": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_0": 1}} +{"7": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_5": 1}} +{"8": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_6": 1}} +{"9": {"2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_4": 1}} +{"10": {"2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_4": 1}} +{"11": {"531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f_0": 1}} +{"12": {"531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f_0": 1}} +{"13": {"531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f_0": 1}} +{"14": {"0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95_0": 1}} +{"15": {"0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95_0": 1}} +{"16": {"0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95_0": 1}} +{"17": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_0": 1}} +{"18": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_1": 1}} +{"19": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_2": 1}} +{"20": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_1": 1}} +{"21": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_2": 1}} +{"22": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_0": 1}} +{"23": {"a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_2": 1}} +{"24": {"a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_1": 1}} +{"25": {"86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_0": 1}} +{"26": {"f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_4": 1}} +{"27": {"f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_0": 1}} +{"28": {"f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_1": 1}} +{"29": {"17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2_0": 1}} +{"30": {"17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2_0": 1}} +{"31": {"17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2_0": 1}} +{"32": {"db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_7": 1}} +{"33": {"db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_2": 1}} +{"34": {"db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_6": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_5": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_4": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_3": 1, 
"db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_2": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_1": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_0": 1}} +{"35": {"db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_7": 1}} +{"36": {"db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_2": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_1": 1}} +{"37": {"ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_0": 1}} +{"38": {"ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_0": 1}} +{"39": {"ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_1": 1}} +{"40": {"ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_0": 1}} +{"41": {"3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_0": 1}} +{"42": {"3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_1": 1}} +{"43": {"3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_1": 1}} +{"44": {"3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_1": 1}} +{"45": {"3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_0": 1}} +{"46": {"b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad_0": 1}} +{"47": {"b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad_0": 1}} +{"48": {"b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad_0": 1}} +{"49": {"d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_0": 1}} +{"50": {"d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_4": 1}} +{"51": {"fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_0": 1}} +{"52": {"fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_0": 1}} +{"53": {"fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_2": 1}} +{"54": {"fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_3": 1}} +{"55": {"fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_5": 1}} +{"56": {"44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3_1": 1}} +{"57": {"bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2_1": 1}} +{"58": {"2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_3": 1}} +{"59": {"2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_3": 1}} +{"60": {"1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_0": 1}} +{"61": {"1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_0": 1}} +{"62": {"538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_0": 1}} +{"63": {"538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_0": 1}} +{"64": {"538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_1": 1}} +{"65": {"538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_3": 1}} +{"66": {"6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_0": 1}} +{"67": {"6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_1": 1, "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_0": 1}} +{"68": {"6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_1": 1}} +{"69": {"6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_1": 1, "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_0": 1}} +{"70": {"6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_2": 1}} +{"71": {"68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_1": 1}} +{"72": 
{"68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_1": 1}} +{"73": {"68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_1": 1}} +{"74": {"68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_2": 1}} +{"75": {"68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_2": 1}} +{"76": {"8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_3": 1}} +{"77": {"8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_3": 1}} +{"78": {"e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_5": 1}} +{"79": {"e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_9": 1}} +{"80": {"b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36_0": 1}} +{"81": {"b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36_0": 1}} +{"82": {"f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d_0": 1}} +{"83": {"f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d_0": 1}} +{"84": {"f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d_0": 1}} +{"85": {"087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_1": 1}} +{"86": {"130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_2": 1}} +{"87": {"130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_0": 1}} +{"88": {"130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_0": 1}} +{"89": {"130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_1": 1}} +{"90": {"130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_2": 1}} +{"91": {"43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd_1": 1}} +{"92": {"43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd_1": 1}} +{"93": {"4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8_0": 1}} +{"94": {"4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8_0": 1}} +{"95": {"4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8_0": 1}} +{"96": {"e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_2": 1}} +{"97": {"e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_3": 1, "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_2": 1, "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_1": 1, "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_0": 1}} +{"98": {"f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_23": 1, "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_16": 1}} +{"99": {"f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_20": 1, "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_18": 1, "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_15": 1, "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_0": 1}} +{"100": {"bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_3": 1}} +{"101": {"bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_3": 1}} +{"102": {"bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_5": 1}} +{"103": {"f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0_0": 1}} +{"104": {"f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0_0": 1}} +{"105": {"f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0_0": 1}} +{"106": {"f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0_0": 1}} +{"107": {"26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_2": 1, 
"26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_1": 1, "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_0": 1}} +{"108": {"26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_1": 1}} +{"109": {"26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_4": 1}} +{"110": {"26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_2": 1}} +{"111": {"e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_3": 1}} +{"112": {"e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_2": 1}} +{"113": {"e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_9": 1}} +{"114": {"c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_6": 1, "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_4": 1, "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_3": 1, "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_2": 1, "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_1": 1, "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_0": 1}} +{"115": {"c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_7": 1}} +{"116": {"eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09_1": 1}} +{"117": {"eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09_1": 1}} +{"118": {"a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b_0": 1}} +{"119": {"a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b_1": 1}} +{"120": {"a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b_1": 1}} +{"121": {"2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_1": 1}} +{"122": {"2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_3": 1}} +{"123": {"b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_9": 1, "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_3": 1, "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_1": 1, "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_0": 1}} +{"124": {"b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_5": 1}} +{"125": {"b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_1": 1}} +{"126": {"cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_3": 1}} +{"127": {"cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_11": 1, "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_10": 1, "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_7": 1, "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_3": 1}} +{"128": {"2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_1": 1}} +{"129": {"2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_1": 1, "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_0": 1}} +{"130": {"2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_1": 1}} +{"131": {"2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_1": 1}} +{"132": {"bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_1": 1}} +{"133": {"bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_1": 1}} +{"134": {"4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_6": 1, "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_3": 1, "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_1": 1, "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_0": 1}} +{"135": 
{"4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_6": 1, "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_5": 1}} +{"136": {"4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_8": 1, "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_7": 1}} +{"137": {"d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_2": 1}} +{"138": {"d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_1": 1}} +{"139": {"d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_1": 1, "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_0": 1}} +{"140": {"d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_11": 1}} +{"141": {"36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_1": 1}} +{"142": {"36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_2": 1}} +{"143": {"36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_4": 1}} +{"144": {"36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_5": 1}} +{"145": {"b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e_1": 1, "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e_0": 1}} +{"146": {"7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298_1": 1}} +{"147": {"7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298_2": 1}} +{"148": {"0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_12": 1}} +{"149": {"adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823_0": 1}} +{"150": {"adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823_0": 1}} +{"151": {"1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84_0": 1}} +{"152": {"1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84_2": 1}} +{"153": {"9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_0": 1}} +{"154": {"9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_0": 1}} +{"155": {"639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_0": 1}} +{"156": {"639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_1": 1}} +{"157": {"639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_3": 1}} +{"158": {"21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_0": 1}} +{"159": {"21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_1": 1}} +{"160": {"21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_4": 1}} +{"161": {"58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_0": 1}} +{"162": {"58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_0": 1}} +{"163": {"58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_1": 1}} +{"164": {"58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_1": 1}} +{"165": {"58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_1": 1}} +{"166": {"0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_1": 1, "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_0": 1}} +{"167": {"0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_2": 1}} +{"168": {"0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_3": 1}} +{"169": {"ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26_0": 1}} +{"170": {"ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26_1": 1}} +{"171": {"8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_1": 1}} +{"172": {"8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_2": 1}} 
+{"173": {"63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_0": 1}} +{"174": {"63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_1": 1}} +{"175": {"63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_3": 1}} +{"176": {"28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_2": 1}} +{"177": {"e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7_0": 1}} +{"178": {"e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7_0": 1}} +{"179": {"e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7_0": 1}} +{"180": {"e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7_0": 1}} +{"181": {"dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_2": 1}} +{"182": {"2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_2": 1}} +{"183": {"2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_4": 1}} +{"184": {"2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_3": 1, "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_2": 1, "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_1": 1, "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_0": 1}} +{"185": {"6e284600be25c8833b866ef0ebfab953a8d0a0f8420cfe56fa17e28664de1b82_0": 1}} +{"186": {"c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_1": 1}} +{"187": {"c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_3": 1}} +{"188": {"c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_3": 1}} +{"189": {"3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293_0": 1}} +{"190": {"3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293_0": 1}} +{"191": {"96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_14": 1}} +{"192": {"96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_49": 1}} +{"193": {"96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_77": 1, "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_72": 1, "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_51": 1, "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_26": 1, "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_16": 1, "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_15": 1}} +{"194": {"9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_1": 1}} +{"195": {"9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_1": 1}} +{"196": {"9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_1": 1}} +{"197": {"9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_1": 1}} +{"198": {"139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_1": 1}} +{"199": {"139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_1": 1}} +{"200": {"139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_1": 1}} +{"201": {"883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd_1": 1}} +{"202": {"8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5_1": 1}} +{"203": {"76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_7": 1, "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_1": 1}} +{"204": {"76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_7": 1}} +{"205": {"76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_9": 1}} +{"206": {"05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_6": 1}} +{"207": 
{"05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_11": 1}} +{"208": {"05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_17": 1}} +{"209": {"7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_3": 1}} +{"210": {"e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_2": 1, "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_1": 1}} +{"211": {"e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_3": 1}} +{"212": {"b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_2": 1}} +{"213": {"b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_2": 1, "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_1": 1, "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_0": 1}} +{"214": {"b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_1": 1}} +{"215": {"85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_2": 1}} +{"216": {"85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_2": 1}} +{"217": {"85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_3": 1}} +{"218": {"d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_2": 1}} +{"219": {"d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_0": 1}} +{"220": {"d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_1": 1}} +{"221": {"5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_1": 1}} +{"222": {"5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_1": 1}} +{"223": {"5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_1": 1}} +{"224": {"5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_1": 1}} +{"225": {"968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_3": 1, "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_1": 1, "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_0": 1}} +{"226": {"968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_6": 1}} +{"227": {"652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_1": 1, "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_0": 1}} +{"228": {"652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_2": 1}} +{"229": {"652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_2": 1}} +{"230": {"652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_2": 1}} +{"231": {"652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_1": 1}} +{"232": {"6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a_1": 1}} +{"233": {"b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_0": 1}} +{"234": {"b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_1": 1}} +{"235": {"b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_1": 1}} +{"236": {"b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_0": 1}} +{"237": {"e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_1": 1}} +{"238": {"e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_3": 1, "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_2": 1}} +{"239": {"0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_1": 1}} +{"240": {"0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_1": 1}} +{"241": {"0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_1": 1}} +{"242": {"0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_1": 1}} +{"243": 
{"fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_3": 1, "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_2": 1, "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_0": 1}} +{"244": {"fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_8": 1}} +{"245": {"4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_2": 1}} +{"246": {"4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_2": 1}} +{"247": {"4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_2": 1}} diff --git a/experiments/data/contextual-embeddings/data_base/queries.jsonl b/experiments/data/contextual-embeddings/data_base/queries.jsonl new file mode 100644 index 0000000..b966ac7 --- /dev/null +++ b/experiments/data/contextual-embeddings/data_base/queries.jsonl @@ -0,0 +1,248 @@ +{"id": "0", "text": "What is the purpose of the DiffExecutor struct?"} +{"id": "1", "text": "How do you create a new DiffExecutor instance?"} +{"id": "2", "text": "What happens in the `run_target` method of the DiffExecutor?"} +{"id": "3", "text": "What is the purpose of the SIGNALS and SIGNALS_PTR static variables?"} +{"id": "4", "text": "How does the harness closure work?"} +{"id": "5", "text": "What is the purpose of the StdMapObserver?"} +{"id": "6", "text": "What feedbacks are used in this fuzzer?"} +{"id": "7", "text": "How is the initial corpus generated?"} +{"id": "8", "text": "What stages are used in the fuzzer?"} +{"id": "9", "text": "What does the `OomObserver` struct do?"} +{"id": "10", "text": "How do I create a new `OomObserver`?"} +{"id": "11", "text": "What does the function `both_require` do?"} +{"id": "12", "text": "How does `both_require` check for the sequence 'a', 'b', 'c'?"} +{"id": "13", "text": "What is the purpose of the `len` parameter in `both_require`?"} +{"id": "14", "text": "What does the vuln() function do?"} +{"id": "15", "text": "How is input normally read in the main() function?"} +{"id": "16", "text": "What input condition causes the program to abort in the main() function?"} +{"id": "17", "text": "What is the purpose of the `MergeScheduler` struct?"} +{"id": "18", "text": "How does the `on_add` method of the `MergeScheduler` work?"} +{"id": "19", "text": "What is the purpose of the `removable()` method in the `MergeScheduler`?"} +{"id": "20", "text": "How does the `on_remove` method of the `MergeScheduler` work?"} +{"id": "21", "text": "What is the purpose of the `current()` method in the `MergeScheduler`?"} +{"id": "22", "text": "Why is the `next()` method of the `MergeScheduler` unimplemented?"} +{"id": "23", "text": "How are the `Fp` and `Lr` registers defined as aliases in the `Regs` enum?"} +{"id": "24", "text": "What is the purpose of the `get_backdoor_arch_regs` function?"} +{"id": "25", "text": "How do I get the `EnumMap` of backdoor architecture registers?"} +{"id": "26", "text": "How do you convert a `NautilusInput` to a `BytesInput`?"} +{"id": "27", "text": "How do you get the `Tree` representation of a `NautilusInput`?"} +{"id": "28", "text": "What traits does `NautilusInput` implement?"} +{"id": "29", "text": "How do you initialize the logger?"} +{"id": "30", "text": "How is the log file created?"} +{"id": "31", "text": "What logger implementation is being used?"} +{"id": "32", "text": "How do you register a new type in the Registry?"} +{"id": "33", "text": "What is the purpose of the `_real_register` method?"} +{"id": "34", "text": "How can you retrieve registered types from the Registry?"} +{"id": "35", "text": 
"How are targeted types handled in the Registry?"} +{"id": "36", "text": "What is the purpose of the `_modules` set in the Registry?"} +{"id": "37", "text": "What does the Octal class do?"} +{"id": "38", "text": "How does the decode method of the Octal class work?"} +{"id": "39", "text": "What does the getTarget method of the Octal class do?"} +{"id": "40", "text": "What external dependencies does the Octal class have?"} +{"id": "41", "text": "How does the decode method of the A1z26 class work?"} +{"id": "42", "text": "What is the purpose of the priority method in the A1z26 class?"} +{"id": "43", "text": "What does the getParams method do in the A1z26 class?"} +{"id": "44", "text": "What is the purpose of the getTarget method?"} +{"id": "45", "text": "How are the delimiters in the input ciphertext handled?"} +{"id": "46", "text": "What is the purpose of the priority method in the Base58_ripple class?"} +{"id": "47", "text": "What does the getParams method of the Base58_ripple class do?"} +{"id": "48", "text": "What is the purpose of the getTarget method in the Base58_ripple class?"} +{"id": "49", "text": "How are the character and word boundaries determined in the Morse code decoding process?"} +{"id": "50", "text": "What is the purpose of the priority method in the Morse_code class?"} +{"id": "51", "text": "What does the `getInfo` method of the `Soundex` class do?"} +{"id": "52", "text": "What does the `getTarget` method of the `Soundex` class return?"} +{"id": "53", "text": "How does the `attemptCrack` method of the `Soundex` class attempt to crack a Soundex-encoded ciphertext?"} +{"id": "54", "text": "What does the `sortlistwithdict` method of the `Soundex` class do?"} +{"id": "55", "text": "What parameters does the `Soundex` class take in its constructor?"} +{"id": "56", "text": "What parameters can be configured for the Tap_code decoder?"} +{"id": "57", "text": "How does the CipheyDists class handle configuration?"} +{"id": "58", "text": "What is the priority method used for in the Base69 class?"} +{"id": "59", "text": "How are the parameters for the Base69 class specified?"} +{"id": "60", "text": "What encryption schemes do the tests cover?"} +{"id": "61", "text": "What is the expected decrypted plaintext used in most of the tests?"} +{"id": "62", "text": "What does the MakeBools function return?"} +{"id": "63", "text": "How does the MakeFixedStrings function work?"} +{"id": "64", "text": "What is the purpose of the long string in the MakeStrings function?"} +{"id": "65", "text": "What UUID values are returned by the MakeUUIDs function?"} +{"id": "66", "text": "How can I append a column to a ColumnTuple?"} +{"id": "67", "text": "How do I load column data from an input stream into a ColumnTuple?"} +{"id": "68", "text": "How can I clear the data in a ColumnTuple?"} +{"id": "69", "text": "How do I get the number of rows in a ColumnTuple?"} +{"id": "70", "text": "What is the purpose of the ColumnTupleT class?"} +{"id": "71", "text": "How can you append elements to a ColumnIPv4 instance?"} +{"id": "72", "text": "How can you access elements from a ColumnIPv4 instance?"} +{"id": "73", "text": "How can you append the content of another column to a ColumnIPv4 instance?"} +{"id": "74", "text": "How can you get the number of rows in a ColumnIPv4 instance?"} +{"id": "75", "text": "How can you create a slice of a ColumnIPv4 instance?"} +{"id": "76", "text": "What does the GetTypeMeta() function do?"} +{"id": "77", "text": "How does the CompateStringsCaseInsensitive() function compare two strings 
case-insensitively?"} +{"id": "78", "text": "What regular expression syntax is supported on Windows and Mac for death tests?"} +{"id": "79", "text": "What is a known caveat with \"threadsafe\" style death tests?"} +{"id": "80", "text": "How do you read a string using WireFormat?"} +{"id": "81", "text": "How do you read a 64-bit unsigned integer using WireFormat?"} +{"id": "82", "text": "What is the purpose of the LoadPrefix function in the Column class?"} +{"id": "83", "text": "What is the purpose of the SavePrefix function in the Column class?"} +{"id": "84", "text": "How does the Save function in the Column class work?"} +{"id": "85", "text": "How does the ColumnLowCardinality class handle null values?"} +{"id": "86", "text": "What geometric data types are supported by the code?"} +{"id": "87", "text": "How can you append an element to a ColumnGeo?"} +{"id": "88", "text": "How can you access an element in a ColumnGeo?"} +{"id": "89", "text": "How can you append the content of one ColumnGeo to another?"} +{"id": "90", "text": "How can you clear the data of a ColumnGeo?"} +{"id": "91", "text": "How do you construct a ProjectedIterator?"} +{"id": "92", "text": "How do you increment and decrement a ProjectedIterator?"} +{"id": "93", "text": "What are the possible values for the ConsoleOutput enum?"} +{"id": "94", "text": "What package is the ConsoleOutput enum defined in?"} +{"id": "95", "text": "What do the different values of the ConsoleOutput enum represent?"} +{"id": "96", "text": "How does the UpdateChecker store the timestamp of the last update check?"} +{"id": "97", "text": "What does the UpdateChecker return if the current version is up to date?"} +{"id": "98", "text": "How does the DefaultCredentialRetrievers class handle credential helpers on Windows?"} +{"id": "99", "text": "How does the DefaultCredentialRetrievers class avoid duplicate CredentialRetriever instances?"} +{"id": "100", "text": "What does the `ReproducibleImageTest` test class verify?"} +{"id": "101", "text": "How does the `createImage()` method create the test image?"} +{"id": "102", "text": "What is the expected tarball structure and how is it verified in `testTarballStructure()`?"} +{"id": "103", "text": "What package does the HelloWorld class belong to?"} +{"id": "104", "text": "What license is this code released under?"} +{"id": "105", "text": "What year was this code copyrighted?"} +{"id": "106", "text": "What company owns the copyright to this code?"} +{"id": "107", "text": "How does MavenSettingsServerCredentials infer credentials for a server?"} +{"id": "108", "text": "What exceptions can be thrown when inferring credentials with MavenSettingsServerCredentials?"} +{"id": "109", "text": "What is the format of the returned AuthProperty when inferring credentials?"} +{"id": "110", "text": "How are the test settings files used in the tests?"} +{"id": "111", "text": "How does the testPull() method verify the correctness of the pulled BLOB?"} +{"id": "112", "text": "How is the RegistryClient instance created in the test methods?"} +{"id": "113", "text": "How does JibBuildRunner handle a RegistryUnauthorizedException with a 403 Forbidden status code?"} +{"id": "114", "text": "How does the buildToDockerDaemonAndRun method verify the built image?"} +{"id": "115", "text": "How does the testExecute_dockerClient test work?"} +{"id": "116", "text": "What exception is thrown when registry authentication fails?"} +{"id": "117", "text": "What information is included in the exception message when a 
`RegistryAuthenticationFailedException` is thrown?"} +{"id": "118", "text": "What is the default length of a generated pepper when no length is specified?"} +{"id": "119", "text": "Is it possible to generate a pepper with a length of zero?"} +{"id": "120", "text": "What is the expected value of the pepper returned by the PepperGenerator.get() method?"} +{"id": "121", "text": "What is the purpose of the slowEquals method that takes two CharSequence objects?"} +{"id": "122", "text": "How does the hash method handle the presence or absence of a salt value?"} +{"id": "123", "text": "What are the input parameters for the BalloonHashingFunction constructor?"} +{"id": "124", "text": "How can I obtain an instance of the BalloonHashingFunction using the factory method?"} +{"id": "125", "text": "What are the test vectors used in the `TEST_VECTORS` array?"} +{"id": "126", "text": "What is the maximum digest size supported by this Blake2b implementation?"} +{"id": "127", "text": "How do you reset the hasher to its initial state?"} +{"id": "128", "text": "What class does BadParametersException extend?"} +{"id": "129", "text": "What package does the BadParametersException class belong to?"} +{"id": "130", "text": "Who is the author of the BadParametersException class?"} +{"id": "131", "text": "Since which version has the BadParametersException class been available?"} +{"id": "132", "text": "What is the purpose of the Hash class?"} +{"id": "133", "text": "What information does the Hash class store?"} +{"id": "134", "text": "How do I create a HashBuilder instance?"} +{"id": "135", "text": "How do I specify the hashing algorithm to use with the HashBuilder?"} +{"id": "136", "text": "What is the difference between withPBKDF2() and withCompressedPBKDF2()?"} +{"id": "137", "text": "How do you create an instance of MessageDigestFunction with a specific hashing algorithm and salt option?"} +{"id": "138", "text": "How do you hash a password using MessageDigestFunction?"} +{"id": "139", "text": "How do you check if a password matches a hashed value using MessageDigestFunction?"} +{"id": "140", "text": "How can you retrieve the hashing algorithm and salt option used by a MessageDigestFunction instance?"} +{"id": "141", "text": "What does the test method `issue92()` do?"} +{"id": "142", "text": "What does the test method `issue99()` do?"} +{"id": "143", "text": "What does the test method `issue93()` do?"} +{"id": "144", "text": "What does the test method `issue120()` do?"} +{"id": "145", "text": "Which package does the Argon2 enum belong to?"} +{"id": "146", "text": "What is the purpose of the `Tag` class?"} +{"id": "147", "text": "What is the purpose of the `pull` function in the `Tag` class?"} +{"id": "148", "text": "How does the Serializer class serialize branch control instructions like br and br_if?"} +{"id": "149", "text": "What is the purpose of the `Log` class?"} +{"id": "150", "text": "What is the expected behavior of the `body` method of the `Log` class?"} +{"id": "151", "text": "What is the purpose of the `procRaise` function in the `Environ` class?"} +{"id": "152", "text": "How does the `procRaise` function handle unsupported signals?"} +{"id": "153", "text": "What is the purpose of the `printPluginMock` function?"} +{"id": "154", "text": "What is the purpose of the `body` method in the mock classes?"} +{"id": "155", "text": "How do you access the magic bytes of a Module?"} +{"id": "156", "text": "What sections are contained in a Module?"} +{"id": "157", "text": "How do you check if a Module has been 
validated?"} +{"id": "158", "text": "How can I retrieve the current log level using the provided classes?"} +{"id": "159", "text": "How can I set an integer option value using the provided classes?"} +{"id": "160", "text": "What is the purpose of the AVRescaleQ class?"} +{"id": "161", "text": "What is the purpose of the WasmEdge::PO namespace?"} +{"id": "162", "text": "What error codes are defined in the ErrCode enum?"} +{"id": "163", "text": "What data does the Error class store?"} +{"id": "164", "text": "How can I construct an Error object?"} +{"id": "165", "text": "How can I access the error code and message of an Error object?"} +{"id": "166", "text": "How can I set the ID of a specific chapter using the provided functions?"} +{"id": "167", "text": "How can I set the time base of a specific chapter?"} +{"id": "168", "text": "What does the AVChapterStart function do?"} +{"id": "169", "text": "What is the purpose of the `importPk` function?"} +{"id": "170", "text": "How does the `pkExportData` function work?"} +{"id": "171", "text": "How do you create a new instance of the `Pty` struct?"} +{"id": "172", "text": "How can you access the child process watcher associated with a `Pty` instance?"} +{"id": "173", "text": "What does the `merge` function do?"} +{"id": "174", "text": "How does the `merge_tables` function work?"} +{"id": "175", "text": "What does the `merge_sequence` test verify?"} +{"id": "176", "text": "How does the `get_pw_entry` function work?"} +{"id": "177", "text": "What fields does the `Mouse` struct contain?"} +{"id": "178", "text": "How are the default mouse bindings obtained in the `default` implementation of `MouseBindings`?"} +{"id": "179", "text": "How is deserialization handled for the `MouseBindings` struct?"} +{"id": "180", "text": "What is the purpose of the `MouseBinding` type?"} +{"id": "181", "text": "What is the purpose of the `new_nop` method?"} +{"id": "182", "text": "How does the Scheduler schedule a new event?"} +{"id": "183", "text": "How can you cancel a scheduled event?"} +{"id": "184", "text": "How can you check if a timer is already scheduled?"} +{"id": "185", "text": "What does the `attach_handler` function do?"} +{"id": "186", "text": "How do you create a new `Row` instance?"} +{"id": "187", "text": "What does the `grow` method do in the `Row` struct?"} +{"id": "188", "text": "How does the `shrink` method work in the `Row` struct?"} +{"id": "189", "text": "What is the purpose of the `Debug` struct?"} +{"id": "190", "text": "How can I specify the renderer preference in Alacritty?"} +{"id": "191", "text": "What is the purpose of the Display struct?"} +{"id": "192", "text": "How does the Display struct handle configuration updates?"} +{"id": "193", "text": "What is the purpose of the FrameTimer struct?"} +{"id": "194", "text": "What class does HasWeightCol extend?"} +{"id": "195", "text": "What is the name and description of the weight column parameter?"} +{"id": "196", "text": "How can I get the value of the weight column parameter?"} +{"id": "197", "text": "What is the default value of the weight column parameter?"} +{"id": "198", "text": "What parameters does the VectorSlicer class have?"} +{"id": "199", "text": "How do I specify the indices to slice from the input vector?"} +{"id": "200", "text": "What restrictions are there on the indices parameter?"} +{"id": "201", "text": "How does an operator or UDF get access to the current epoch number?"} +{"id": "202", "text": "Are the vector and l2Norm fields mutable in the VectorWithNorm class?"} +{"id": 
"203", "text": "How do you set the parameters of the UnivariateFeatureSelector?"} +{"id": "204", "text": "What happens if you don't set the feature_type or label_type parameters of the UnivariateFeatureSelector?"} +{"id": "205", "text": "What is the output schema of the UnivariateFeatureSelectorModel after transforming data?"} +{"id": "206", "text": "How does the testParam() method verify the parameter settings of LinearRegression?"} +{"id": "207", "text": "What does the testInputTypeConversion() method verify?"} +{"id": "208", "text": "What does the testRegularization() method check?"} +{"id": "209", "text": "What is the maximum size of a segment that the MemorySegmentWriter will write?"} +{"id": "210", "text": "What is the purpose of the HeadOperatorCoordinator class?"} +{"id": "211", "text": "How does the HeadOperatorCoordinator determine when to send out a GloballyAlignedEvent?"} +{"id": "212", "text": "How is the input data generated in this example?"} +{"id": "213", "text": "How are the results extracted and displayed in this example?"} +{"id": "214", "text": "What execution environment is used in this example?"} +{"id": "215", "text": "How do you create an IndexToStringModel instance?"} +{"id": "216", "text": "How do you set the model data for the IndexToStringModel?"} +{"id": "217", "text": "How do you extract and display the results after applying the IndexToStringModel?"} +{"id": "218", "text": "How do you create a new instance of the ColorEndPatternConverter?"} +{"id": "219", "text": "What parameters does the ColorEndPatternConverter constructor take?"} +{"id": "220", "text": "What namespaces are used in this file?"} +{"id": "221", "text": "What does the XMLFilenameFilter constructor do?"} +{"id": "222", "text": "What is the purpose of the pattern and replacement strings constructed in the XMLFilenameFilter constructor?"} +{"id": "223", "text": "How would the XMLFilenameFilter be used?"} +{"id": "224", "text": "What C++ standard library classes are used in this code?"} +{"id": "225", "text": "How does the testTrigger unit test work?"} +{"id": "226", "text": "How does the testValid unit test work?"} +{"id": "227", "text": "How do you configure the logging system with a specific layout using BasicConfigurator?"} +{"id": "228", "text": "How do you configure the logging system with a specific appender using BasicConfigurator?"} +{"id": "229", "text": "How do you reset the logging configuration to its default state using BasicConfigurator?"} +{"id": "230", "text": "What is the default layout used by BasicConfigurator if no layout is provided?"} +{"id": "231", "text": "What is the purpose of the WideLife template class used in the code?"} +{"id": "232", "text": "How does the hexdump function handle different character types for logging?"} +{"id": "233", "text": "How do you create a DenyAllFilter instance?"} +{"id": "234", "text": "What do you need to do after creating a DenyAllFilter instance?"} +{"id": "235", "text": "What is the signature of the decide() method of the DenyAllFilter?"} +{"id": "236", "text": "What namespaces are used in this file?"} +{"id": "237", "text": "What is the purpose of the MinimumTestCase class?"} +{"id": "238", "text": "What is the purpose of the common() method?"} +{"id": "239", "text": "How do you obtain an instance of NDCPatternConverter?"} +{"id": "240", "text": "How does the NDCPatternConverter format the logging event?"} +{"id": "241", "text": "What is the inheritance hierarchy of NDCPatternConverter?"} +{"id": "242", "text": "What macros are used in 
the NDCPatternConverter class declaration?"} +{"id": "243", "text": "How does the test1() method test the FMTLayout?"} +{"id": "244", "text": "What is the purpose of the common() method?"} +{"id": "245", "text": "How do you construct a BufferedWriter object?"} +{"id": "246", "text": "What methods does BufferedWriter override from its parent class?"} +{"id": "247", "text": "How does BufferedWriter handle object destruction?"} diff --git a/experiments/data/contextual-embeddings/data_context/passages.jsonl b/experiments/data/contextual-embeddings/data_context/passages.jsonl new file mode 100644 index 0000000..15e1282 --- /dev/null +++ b/experiments/data/contextual-embeddings/data_context/passages.jsonl @@ -0,0 +1,737 @@ +{"page_content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\n\n\nThis chunk describes the `DiffExecutor` struct, which is an executor for differential fuzzing that wraps two executors and runs them sequentially with the same input.", "metadata": {"chunk_id": "doc_1_chunk_0", "original_index": 0, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_0"}, "type": "Document"} +{"page_content": "impl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n\n\nThe provided chunk is the implementation of the `new` method for the `DiffExecutor` struct, which is responsible for creating a new instance of the `DiffExecutor` by wrapping the given primary and secondary executors, as well as the differential observers.", "metadata": {"chunk_id": "doc_1_chunk_1", "original_index": 1, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_1"}, "type": "Document"} +{"page_content": " /// Retrieve the primary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn primary(&mut self) -> &mut A {\n &mut self.primary\n }\n\n /// Retrieve the secondary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn secondary(&mut self) -> &mut B {\n &mut self.secondary\n }\n}\n\nimpl Executor for DiffExecutor\nwhere\n A: Executor + HasObservers,\n B: Executor + HasObservers,\n EM: UsesState,\n DOT: DifferentialObserversTuple,\n Z: UsesState,\n{\n fn run_target(\n &mut self,\n fuzzer: &mut Z,\n state: &mut Self::State,\n mgr: &mut EM,\n input: &Self::Input,\n ) -> Result {\n self.observers(); // update in advance\n let observers = self.observers.get_mut();\n 
observers\n .differential\n\n\nThe provided chunk is part of the implementation of the `DiffExecutor` struct, which is a wrapper around two executors that will be run after each other with the same input. The chunk includes the methods to retrieve the primary and secondary executors, as well as the implementation of the `run_target` method, which runs the primary and secondary executors and compares their exit codes.", "metadata": {"chunk_id": "doc_1_chunk_2", "original_index": 2, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_2"}, "type": "Document"} +{"page_content": " .pre_observe_first_all(observers.primary.as_mut())?;\n observers.primary.as_mut().pre_exec_all(state, input)?;\n let ret1 = self.primary.run_target(fuzzer, state, mgr, input)?;\n observers\n .primary\n .as_mut()\n .post_exec_all(state, input, &ret1)?;\n observers\n .differential\n .post_observe_first_all(observers.primary.as_mut())?;\n observers\n .differential\n\n\nThe provided chunk is part of the `run_target` method implementation of the `DiffExecutor` struct, which is responsible for executing the primary and secondary executors and observing their behavior.", "metadata": {"chunk_id": "doc_1_chunk_3", "original_index": 3, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_3"}, "type": "Document"} +{"page_content": " .pre_observe_second_all(observers.secondary.as_mut())?;\n observers.secondary.as_mut().pre_exec_all(state, input)?;\n let ret2 = self.secondary.run_target(fuzzer, state, mgr, input)?;\n observers\n .secondary\n .as_mut()\n .post_exec_all(state, input, &ret2)?;\n observers\n .differential\n .post_observe_second_all(observers.secondary.as_mut())?;\n if ret1 == ret2 {\n Ok(ret1)\n } else {\n // We found a diff in the exit codes!\n Ok(ExitKind::Diff {\n primary: ret1.into(),\n secondary: ret2.into(),\n })\n }\n }\n}\n\n\n\nThis chunk is part of the `run_target` method implementation for the `DiffExecutor` struct, which is responsible for executing the primary and secondary executors and comparing their results.", "metadata": {"chunk_id": "doc_1_chunk_4", "original_index": 4, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_4"}, "type": "Document"} +{"page_content": "/// Proxy the observers of the inner executors\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(\n bound = \"A: serde::Serialize + serde::de::DeserializeOwned, B: serde::Serialize + serde::de::DeserializeOwned, DOT: serde::Serialize + serde::de::DeserializeOwned\"\n)]\npub struct ProxyObserversTuple {\n primary: OwnedMutPtr,\n secondary: OwnedMutPtr,\n differential: DOT,\n}\n\nimpl ObserversTuple for ProxyObserversTuple\nwhere\n A: ObserversTuple,\n B: ObserversTuple,\n DOT: DifferentialObserversTuple,\n S: UsesInput,\n{\n fn pre_exec_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_all(state, input)\n }\n\n\n\nThe provided chunk is a part of the implementation of the `ProxyObserversTuple` struct, which is used to proxy the observers of the inner executors in the `DiffExecutor` struct. 
This struct is responsible for managing the observers of the primary and secondary executors, as well as the differential observers, and is used within the `DiffExecutor` implementation.", "metadata": {"chunk_id": "doc_1_chunk_5", "original_index": 5, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_5"}, "type": "Document"} +{"page_content": " fn post_exec_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential.post_exec_all(state, input, exit_kind)\n }\n\n fn pre_exec_child_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_child_all(state, input)\n }\n\n fn post_exec_child_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential\n .post_exec_child_all(state, input, exit_kind)\n }\n\n\n\nThe provided chunk is part of the implementation of the `ProxyObserversTuple` struct, which is used to proxy the observers of the inner executors in the `DiffExecutor` struct. The chunk contains the implementation of the `ObserversTuple` trait for the `ProxyObserversTuple` struct, specifically the `post_exec_all`, `pre_exec_child_all`, and `post_exec_child_all` methods.", "metadata": {"chunk_id": "doc_1_chunk_6", "original_index": 6, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_6"}, "type": "Document"} +{"page_content": " /// Returns true if a `stdout` observer was added to the list\n #[inline]\n fn observes_stdout(&self) -> bool {\n self.primary.as_ref().observes_stdout() || self.secondary.as_ref().observes_stdout()\n }\n /// Returns true if a `stderr` observer was added to the list\n #[inline]\n fn observes_stderr(&self) -> bool {\n self.primary.as_ref().observes_stderr() || self.secondary.as_ref().observes_stderr()\n }\n\n\n\nThe provided chunk is part of the `ProxyObserversTuple` implementation, which is a proxy for the observers of the inner executors in the `DiffExecutor` struct.", "metadata": {"chunk_id": "doc_1_chunk_7", "original_index": 7, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_7"}, "type": "Document"} +{"page_content": " /// Runs `observe_stdout` for all stdout observers in the list\n fn observe_stdout(&mut self, stdout: &[u8]) {\n self.primary.as_mut().observe_stderr(stdout);\n self.secondary.as_mut().observe_stderr(stdout);\n }\n\n /// Runs `observe_stderr` for all stderr observers in the list\n fn observe_stderr(&mut self, stderr: &[u8]) {\n self.primary.as_mut().observe_stderr(stderr);\n self.secondary.as_mut().observe_stderr(stderr);\n }\n}\n\n\n\nThe provided chunk is part of the implementation of the `ProxyObserversTuple` struct, which is used to proxy the observers of the inner executors in the `DiffExecutor` struct. 
The `observe_stdout` and `observe_stderr` methods are responsible for running the `observe_stdout` and `observe_stderr` methods on the primary and secondary observers, respectively.", "metadata": {"chunk_id": "doc_1_chunk_8", "original_index": 8, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_8"}, "type": "Document"}
+{"page_content": "impl<A, B, DOT> MatchName for ProxyObserversTuple<A, B, DOT>\nwhere\n A: MatchName,\n B: MatchName,\n DOT: MatchName,\n{\n fn match_name<T>(&self, name: &str) -> Option<&T> {\n if let Some(t) = self.primary.as_ref().match_name::<T>(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_ref().match_name::<T>(name) {\n Some(t)\n } else {\n self.differential.match_name::<T>(name)\n }\n }\n fn match_name_mut<T>(&mut self, name: &str) -> Option<&mut T> {\n if let Some(t) = self.primary.as_mut().match_name_mut::<T>(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_mut().match_name_mut::<T>(name) {\n Some(t)\n } else {\n self.differential.match_name_mut::<T>(name)\n }\n }\n}\n\n\n\nThe provided chunk is an implementation of the `MatchName` trait for the `ProxyObserversTuple` struct, which is used to proxy the observers of the inner executors in the `DiffExecutor` struct.", "metadata": {"chunk_id": "doc_1_chunk_9", "original_index": 9, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_9"}, "type": "Document"}
+{"page_content": "impl<A, B, DOT> ProxyObserversTuple<A, B, DOT> {\n fn set(&mut self, primary: &A, secondary: &B) {\n self.primary = OwnedMutPtr::Ptr(ptr::from_ref(primary) as *mut A);\n self.secondary = OwnedMutPtr::Ptr(ptr::from_ref(secondary) as *mut B);\n }\n}\n\nimpl<A, B, OTA, OTB, DOT> UsesObservers for DiffExecutor<A, B, OTA, OTB, DOT>\nwhere\n A: HasObservers<Observers = OTA>,\n B: HasObservers<Observers = OTB>,\n OTA: ObserversTuple<A::State>,\n OTB: ObserversTuple<B::State>,\n DOT: DifferentialObserversTuple<OTA, OTB, A::State>,\n{\n type Observers = ProxyObserversTuple<OTA, OTB, DOT>;\n}\n\nimpl<A, B, OTA, OTB, DOT> UsesState for DiffExecutor<A, B, OTA, OTB, DOT>\nwhere\n A: UsesState,\n B: UsesState<State = A::State>,\n{\n type State = A::State;\n}\n\n\n\nThis chunk defines the `ProxyObserversTuple` struct and its associated methods, as well as the `UsesObservers` and `UsesState` trait implementations for the `DiffExecutor` struct. 
These components are part of the differential fuzzing executor implementation in the libafl library.", "metadata": {"chunk_id": "doc_1_chunk_10", "original_index": 10, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_10"}, "type": "Document"} +{"page_content": "impl HasObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n #[inline]\n fn observers(&self) -> &ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_ref().unwrap()\n }\n }\n\n\n\nThe provided chunk is an implementation of the `HasObservers` trait for the `DiffExecutor` struct, which is responsible for managing the observers of the primary and secondary executors, as well as the differential observers.", "metadata": {"chunk_id": "doc_1_chunk_11", "original_index": 11, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_11"}, "type": "Document"} +{"page_content": " #[inline]\n fn observers_mut(&mut self) -> &mut ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_mut().unwrap()\n }\n }\n}\n\n\nThe provided chunk is part of the implementation of the `DiffExecutor` struct, which is responsible for executing a primary and secondary executor with the same input and comparing the results. The `observers_mut()` method is used to retrieve a mutable reference to the `ProxyObserversTuple` that manages the observers for both the primary and secondary executors.", "metadata": {"chunk_id": "doc_1_chunk_12", "original_index": 12, "pid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_12"}, "type": "Document"} +{"page_content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n\n\nThis chunk contains the necessary imports and declarations for the main function of a fuzzing application built using the LibAFL framework. 
It includes the configuration for the user interface, corpus management, event handling, executor, feedback, and mutational stages.", "metadata": {"chunk_id": "doc_2_chunk_0", "original_index": 0, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_0"}, "type": "Document"} +{"page_content": "/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n\n\nThis chunk defines a coverage map and a function to set signals in the map, and then contains the main function that sets up the fuzzing process, including the harness function that is being fuzzed.", "metadata": {"chunk_id": "doc_2_chunk_1", "original_index": 1, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_1"}, "type": "Document"} +{"page_content": " #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n\n\nThis chunk is part of the `main()` function, which defines the fuzzing harness and sets up the necessary components for the fuzzing process, including the observation channel, feedback mechanisms, and objective.", "metadata": {"chunk_id": "doc_2_chunk_2", "original_index": 2, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_2"}, "type": "Document"} +{"page_content": " // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n\n\nThis chunk is part of the main function that sets up the fuzzing environment, including creating the initial state for the fuzzing process.", "metadata": {"chunk_id": "doc_2_chunk_3", "original_index": 3, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_3"}, "type": "Document"} +{"page_content": " // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), 
String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n\n\nThis chunk sets up the monitoring, event management, scheduling, and fuzzing components of the LibAFL-based fuzzer.", "metadata": {"chunk_id": "doc_2_chunk_4", "original_index": 4, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_4"}, "type": "Document"} +{"page_content": " // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n\n\nThis chunk sets up the executor for the in-process function and generates the initial inputs for the fuzzing process.", "metadata": {"chunk_id": "doc_2_chunk_5", "original_index": 5, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_5"}, "type": "Document"} +{"page_content": " // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n\n\nThis chunk sets up the mutational stage of the fuzzer, including the mutators to be used, and then runs the fuzzing loop using the configured stages, executor, state, and event manager.", "metadata": {"chunk_id": "doc_2_chunk_6", "original_index": 6, "pid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_6"}, "type": "Document"} +{"page_content": "use core::{ffi::c_void, fmt::Debug};\nuse std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};\n\nuse libafl::{\n events::EventFirer,\n executors::ExitKind,\n feedbacks::Feedback,\n inputs::UsesInput,\n observers::{Observer, ObserversTuple},\n state::State,\n Error,\n};\nuse libafl_bolts::Named;\nuse libc::SIGABRT;\nuse serde::{Deserialize, Serialize};\n\nextern \"C\" {\n fn libafl_check_malloc_size(ptr: *const c_void) -> usize;\n}\n\nstatic RUNNING: AtomicBool = AtomicBool::new(false);\nstatic OOMED: AtomicBool = AtomicBool::new(false);\nstatic RSS_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n// 2GB, which is the default\nstatic MALLOC_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n\nstatic MALLOC_SIZE: AtomicUsize = AtomicUsize::new(0);\n\n\n\nThis chunk contains the imports, external function declarations, and global static variables used in the implementation of the OomObserver and OomFeedback components in the LibAFL fuzzing framework.", "metadata": {"chunk_id": "doc_3_chunk_0", "original_index": 0, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_0"}, "type": "Document"} +{"page_content": 
"/// malloc hook which will be invoked if address sanitizer is present. Used to detect if the target makes a malloc call\n/// that will exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid freshly allocated pointers backed by allocations of `size`.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_malloc_hook(ptr: *const c_void, size: usize) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = match unsafe { libafl_check_malloc_size(ptr) } {\n 0 => size, // either the malloc size function didn't work or it's really zero-sized\n real => real,\n };\n\n\n\nThe provided chunk is a part of the implementation of the `OomObserver` and `OomFeedback` components in a Rust library called `libafl`. The `OomObserver` is responsible for detecting if the target application would run out of memory or violate the permissible usage of `malloc`, and the `OomFeedback` is used to provide feedback on whether the target application crashed due to an observed out-of-memory (OOM) condition.", "metadata": {"chunk_id": "doc_3_chunk_1", "original_index": 1, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_1"}, "type": "Document"} +{"page_content": " let total = MALLOC_SIZE.fetch_add(size, Ordering::Relaxed) + size;\n if (size > MALLOC_MAX.load(Ordering::Relaxed) || total > RSS_MAX.load(Ordering::Relaxed))\n && !OOMED.swap(true, Ordering::Relaxed)\n {\n unsafe {\n // we need to kill the process in a way that immediately triggers the crash handler\n libc::raise(SIGABRT);\n }\n }\n }\n}\n\n\n\nThe provided chunk is part of the `__sanitizer_malloc_hook` function, which is a hook that is invoked when the target program makes a malloc call. The purpose of this hook is to detect if the target program's malloc call will exceed the permissible size, and if so, to immediately kill the process to trigger the crash handler.", "metadata": {"chunk_id": "doc_3_chunk_2", "original_index": 2, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_2"}, "type": "Document"} +{"page_content": "/// free hook which will be invoked if ASAN is present. Used to detect if the target makes a malloc call that will\n/// exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid allocated pointers, about to be freed.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_free_hook(ptr: *const c_void) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = unsafe { libafl_check_malloc_size(ptr) };\n MALLOC_SIZE\n .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |existing| {\n Some(existing.saturating_sub(size))\n })\n .expect(\"must complete successfully\");\n }\n}\n\n\n\nThe provided chunk is a part of the implementation of the `OomObserver` and `OomFeedback` components in a Rust library called `libafl`. 
The `__sanitizer_free_hook` function is a C-style hook that is called when the target program frees memory, and it is used to track the total memory usage of the target program to detect if it exceeds the specified limits.", "metadata": {"chunk_id": "doc_3_chunk_3", "original_index": 3, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_3"}, "type": "Document"} +{"page_content": "const OOM_OBS_NAME: &str = \"libfuzzer-like-oom\";\n\n/// Observer which detects if the target would run out of memory or otherwise violate the permissible usage of malloc\n#[derive(Debug, Serialize, Deserialize)]\npub struct OomObserver {\n oomed: bool,\n}\n\nimpl OomObserver {\n /// Create a [`OomObserver`] with the provided `rss_max` (total heap size) and `malloc_max` (largest permissible malloc\n /// allocation size)\n pub fn new(rss_max: usize, malloc_max: usize) -> Self {\n RSS_MAX.store(rss_max, Ordering::Relaxed);\n MALLOC_MAX.store(malloc_max, Ordering::Relaxed);\n Self { oomed: false }\n }\n}\n\nimpl Named for OomObserver {\n // strictly one name to prevent two from being registered\n fn name(&self) -> &str {\n OOM_OBS_NAME\n }\n}\n\n\n\nThe provided chunk defines an `OomObserver` struct and its associated methods, which is used to detect if the target program would run out of memory or violate the permissible usage of `malloc`. This observer is part of a larger library or framework for fuzzing or testing purposes.", "metadata": {"chunk_id": "doc_3_chunk_4", "original_index": 4, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_4"}, "type": "Document"} +{"page_content": "impl Observer for OomObserver\nwhere\n S: UsesInput,\n{\n fn pre_exec(&mut self, _state: &mut S, _input: &S::Input) -> Result<(), Error> {\n OOMED.store(false, Ordering::Relaxed);\n // must reset for platforms which do not offer malloc tracking\n MALLOC_SIZE.store(0, Ordering::Relaxed);\n RUNNING.store(true, Ordering::Relaxed);\n Ok(())\n }\n\n fn post_exec(\n &mut self,\n _state: &mut S,\n _input: &S::Input,\n _exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n RUNNING.store(false, Ordering::Relaxed);\n self.oomed = OOMED.load(Ordering::Relaxed);\n Ok(())\n }\n\n fn pre_exec_child(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.pre_exec(state, input)\n }\n\n fn post_exec_child(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.post_exec(state, input, exit_kind)\n }\n}\n\n\n\nThe provided chunk is an implementation of the `Observer` trait for the `OomObserver` struct, which is responsible for detecting if the target program would run out of memory or violate the permissible usage of `malloc`. 
This implementation includes methods for setting up and tearing down the observation process, as well as handling pre- and post-execution of the target program.", "metadata": {"chunk_id": "doc_3_chunk_5", "original_index": 5, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_5"}, "type": "Document"}
+{"page_content": "/// Feedback for the similarly named [`OomObserver`] to detect if the target crashed due to an observed OOM\n#[derive(Debug, Serialize, Deserialize, Copy, Clone, Default)]\npub struct OomFeedback;\n\nimpl OomFeedback {\n /// Whether the target OOM'd in the last execution\n pub fn oomed() -> bool {\n OOMED.load(Ordering::Relaxed)\n }\n}\n\nimpl Named for OomFeedback {\n fn name(&self) -> &str {\n \"oom\"\n }\n}\n\nimpl<S> Feedback<S> for OomFeedback\nwhere\n S: State,\n{\n fn is_interesting<EM, OT>(\n &mut self,\n _state: &mut S,\n _manager: &mut EM,\n _input: &S::Input,\n _observers: &OT,\n _exit_kind: &ExitKind,\n ) -> Result<bool, Error>\n where\n EM: EventFirer<State = S>,\n OT: ObserversTuple<S>,\n {\n Ok(Self::oomed())\n }\n}\n\n\nThe provided chunk defines an `OomFeedback` struct and its associated methods, which are used to detect if the target program crashed due to an observed out-of-memory (OOM) condition. This feedback mechanism is designed to work in conjunction with the `OomObserver` defined earlier in the document.", "metadata": {"chunk_id": "doc_3_chunk_6", "original_index": 6, "pid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_6"}, "type": "Document"}
+{"page_content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}\n\nThe provided chunk is a C function named `both_require` that checks if a given byte array of a certain length starts with the sequence \"abc\". It returns `ACCEPT` if the sequence is found, and `REJECT` otherwise.", "metadata": {"chunk_id": "doc_4_chunk_0", "original_index": 0, "pid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f_0"}, "type": "Document"}
+{"page_content": "#include <stdio.h>\n#include <stdlib.h>\n#include <string.h>\n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}\n\nThis chunk of code appears to be a C program that uses the American Fuzzy Lop (AFL) fuzzer to test the vulnerability of a function called `vuln()`. 
The program reads input from a file or shared memory and checks for specific conditions before calling the `vuln()` function.", "metadata": {"chunk_id": "doc_5_chunk_0", "original_index": 0, "pid": "0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95_0"}, "type": "Document"} +{"page_content": "use std::{\n collections::{BTreeSet, HashMap},\n marker::PhantomData,\n};\n\nuse libafl::{\n corpus::{Corpus, CorpusId, Testcase},\n feedbacks::MapNoveltiesMetadata,\n inputs::UsesInput,\n schedulers::{RemovableScheduler, Scheduler},\n state::{HasCorpus, HasMetadata, State, UsesState},\n Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler {\n mapping: HashMap,\n all: BTreeSet,\n phantom: PhantomData,\n}\n\nimpl UsesState for MergeScheduler\nwhere\n S: State,\n{\n type State = S;\n}\n\n\n\nThe provided chunk is a part of a Rust module that defines a `MergeScheduler` struct and its associated implementations. The `MergeScheduler` is used in the context of the `libafl` library, which is a framework for building fuzzing tools.", "metadata": {"chunk_id": "doc_6_chunk_0", "original_index": 0, "pid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_0"}, "type": "Document"} +{"page_content": "impl RemovableScheduler for MergeScheduler\nwhere\n S: State + HasCorpus,\n{\n fn on_remove(\n &mut self,\n _state: &mut Self::State,\n idx: CorpusId,\n _testcase: &Option::Input>>,\n ) -> Result<(), Error> {\n self.all.remove(&idx);\n Ok(())\n }\n}\n\nimpl Scheduler for MergeScheduler\nwhere\n S: State + HasCorpus,\n{\n fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n self.all.insert(idx);\n let testcase = state.corpus().get(idx)?.borrow();\n let meta = testcase.metadata::()?;\n for cov_idx in &meta.list {\n self.mapping.insert(*cov_idx, idx);\n }\n Ok(())\n }\n\n\n\nThe provided chunk is an implementation of the `RemovableScheduler` and `Scheduler` traits for the `MergeScheduler` struct, which is used to manage a corpus of test cases in the context of a fuzzing framework.", "metadata": {"chunk_id": "doc_6_chunk_1", "original_index": 1, "pid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_1"}, "type": "Document"} +{"page_content": " fn next(&mut self, _state: &mut Self::State) -> Result {\n unimplemented!(\"Not suitable for actual scheduling.\");\n }\n}\n\nimpl MergeScheduler {\n pub fn new() -> Self {\n Self {\n mapping: HashMap::default(),\n all: BTreeSet::default(),\n phantom: PhantomData,\n }\n }\n\n pub fn removable(&self) -> BTreeSet {\n self.all\n .difference(&self.mapping.values().copied().collect())\n .copied()\n .collect()\n }\n\n pub fn current(&self) -> &BTreeSet {\n &self.all\n }\n}\n\n\nThe provided chunk is part of the implementation of the `MergeScheduler` struct, which is a custom scheduler used in the `libafl` library for fuzzing. The `next()` method is not implemented, as it is not suitable for actual scheduling. 
The `new()`, `removable()`, and `current()` methods are provided for managing the internal state of the `MergeScheduler`.", "metadata": {"chunk_id": "doc_6_chunk_2", "original_index": 2, "pid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_2"}, "type": "Document"} +{"page_content": "use std::sync::OnceLock;\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::aarch64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n X0 = 0,\n X1 = 1,\n X2 = 2,\n X3 = 3,\n X4 = 4,\n X5 = 5,\n X6 = 6,\n X7 = 7,\n X8 = 8,\n X9 = 9,\n X10 = 10,\n X11 = 11,\n X12 = 12,\n X13 = 13,\n X14 = 14,\n X15 = 15,\n X16 = 16,\n X17 = 17,\n X18 = 18,\n X19 = 19,\n X20 = 20,\n X21 = 21,\n X22 = 22,\n X23 = 23,\n X24 = 24,\n X25 = 25,\n X26 = 26,\n X27 = 27,\n X28 = 28,\n X29 = 29,\n X30 = 30,\n Sp = 31,\n Pc = 32,\n Pstate = 33,\n}\n\n\n\nThis chunk defines an enumeration of ARM64 registers used in the context of a CPU emulator or virtualization system.", "metadata": {"chunk_id": "doc_7_chunk_0", "original_index": 0, "pid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_0"}, "type": "Document"} +{"page_content": "static BACKDOOR_ARCH_REGS: OnceLock> = OnceLock::new();\n\npub fn get_backdoor_arch_regs() -> &'static EnumMap {\n BACKDOOR_ARCH_REGS.get_or_init(|| {\n enum_map! {\n BackdoorArgs::Ret => Regs::X0,\n BackdoorArgs::Cmd => Regs::X0,\n BackdoorArgs::Arg1 => Regs::X1,\n BackdoorArgs::Arg2 => Regs::X2,\n BackdoorArgs::Arg3 => Regs::X3,\n BackdoorArgs::Arg4 => Regs::X4,\n BackdoorArgs::Arg5 => Regs::X5,\n BackdoorArgs::Arg6 => Regs::X6,\n }\n })\n}\n\n\n\nThe provided chunk defines a static `OnceLock` variable `BACKDOOR_ARCH_REGS` and a function `get_backdoor_arch_regs()` that returns a reference to an `EnumMap` mapping `BackdoorArgs` to `Regs`. 
This is likely part of a larger module or crate that provides functionality related to a backdoor or some kind of system-level access.", "metadata": {"chunk_id": "doc_7_chunk_1", "original_index": 1, "pid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_1"}, "type": "Document"}
+{"page_content": "/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n pub const Fp: Regs = Regs::X29;\n pub const Lr: Regs = Regs::X30;\n}\n\n/// Return an ARM64 ArchCapstoneBuilder\npub fn capstone() -> capstone::arch::arm64::ArchCapstoneBuilder {\n capstone::Capstone::new()\n .arm64()\n .mode(capstone::arch::arm64::ArchMode::Arm)\n}\n\npub type GuestReg = u64;\n\nimpl crate::ArchExtras for crate::CPU {\n fn read_return_address<T>(&self) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n self.read_reg(Regs::Lr)\n }\n\n fn write_return_address<T>(&self, val: T) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n self.write_reg(Regs::Lr, val)\n }\n\n\n\nThis chunk defines alias registers, a function to return an ARM64 ArchCapstoneBuilder, a type alias for GuestReg, and implementation of ArchExtras trait for the CPU struct, which includes functions to read and write the return address.", "metadata": {"chunk_id": "doc_7_chunk_2", "original_index": 2, "pid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_2"}, "type": "Document"}
+{"page_content": " fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let reg_id = match idx {\n 0 => Regs::X0,\n 1 => Regs::X1,\n 2 => Regs::X2,\n 3 => Regs::X3,\n 4 => Regs::X4,\n 5 => Regs::X5,\n r => return Err(format!(\"Unsupported argument: {r:}\")),\n };\n\n self.read_reg(reg_id)\n }\n\n\n\nThe provided chunk is a part of the `ArchExtras` trait implementation for the `CPU` struct, which defines functions to read and write function arguments and return addresses for the ARM64 architecture.", "metadata": {"chunk_id": "doc_7_chunk_3", "original_index": 3, "pid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_3"}, "type": "Document"}
+{"page_content": " fn write_function_argument<T>(\n &self,\n conv: CallingConvention,\n idx: i32,\n val: T,\n ) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let val: GuestReg = val.into();\n match idx {\n 0 => self.write_reg(Regs::X0, val),\n 1 => self.write_reg(Regs::X1, val),\n _ => Err(format!(\"Unsupported argument: {idx:}\")),\n }\n }\n}\n\n\nThe provided chunk is a part of the implementation of the `ArchExtras` trait for the `CPU` struct, which provides functionality for reading and writing function arguments and return addresses for the ARM64 architecture.", "metadata": {"chunk_id": "doc_7_chunk_4", "original_index": 4, "pid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_4"}, "type": "Document"}
+{"page_content": "use std::{mem::size_of, sync::OnceLock};\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::x86_64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n Rax = 0,\n Rbx = 1,\n Rcx = 2,\n Rdx = 3,\n Rsi = 4,\n Rdi = 5,\n Rbp = 6,\n Rsp = 7,\n R8 = 8,\n R9 = 9,\n R10 = 10,\n R11 = 11,\n R12 = 12,\n R13 = 13,\n R14 = 14,\n R15 = 15,\n Rip = 16,\n Rflags = 17,\n}\n\nstatic BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\n\n\nThis chunk defines the register enumeration and related functionality for a CPU architecture, specifically for the x86_64 architecture. It includes the definition of the `Regs` enum, which represents the various registers available in the architecture, and a static `BACKDOOR_ARCH_REGS` variable that maps the `BackdoorArgs` enum to the corresponding registers.", "metadata": {"chunk_id": "doc_8_chunk_0", "original_index": 0, "pid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_0"}, "type": "Document"}
+{"page_content": "pub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n BACKDOOR_ARCH_REGS.get_or_init(|| {\n enum_map! {\n BackdoorArgs::Ret => Regs::Rax,\n BackdoorArgs::Cmd => Regs::Rax,\n BackdoorArgs::Arg1 => Regs::Rdi,\n BackdoorArgs::Arg2 => Regs::Rsi,\n BackdoorArgs::Arg3 => Regs::Rdx,\n BackdoorArgs::Arg4 => Regs::R10,\n BackdoorArgs::Arg5 => Regs::R8,\n BackdoorArgs::Arg6 => Regs::R9,\n }\n })\n}\n\n/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n pub const Sp: Regs = Regs::Rsp;\n pub const Pc: Regs = Regs::Rip;\n}\n\n/// Return an X86 `ArchCapstoneBuilder`\n#[must_use]\npub fn capstone() -> capstone::arch::x86::ArchCapstoneBuilder {\n capstone::Capstone::new()\n .x86()\n .mode(capstone::arch::x86::ArchMode::Mode64)\n}\n\npub type GuestReg = u64;\n\n\n\nThis chunk defines functions and types related to the architecture of a backdoor system, including a function to retrieve the mapping of backdoor arguments to CPU registers, aliases for common registers, and a function to create a Capstone disassembler for the x86-64 architecture.", "metadata": {"chunk_id": "doc_8_chunk_1", "original_index": 1, "pid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_1"}, "type": "Document"}
+{"page_content": "impl crate::ArchExtras for crate::CPU {\n fn read_return_address<T>(&self) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n let mut ret_addr = [0; size_of::<GuestReg>()];\n unsafe { self.read_mem(stack_ptr, &mut ret_addr) };\n Ok(GuestReg::from_le_bytes(ret_addr).into())\n }\n\n fn write_return_address<T>(&self, val: T) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n let val: GuestReg = val.into();\n let ret_addr = val.to_le_bytes();\n unsafe { self.write_mem(stack_ptr, &ret_addr) };\n Ok(())\n }\n\n\n\nThe provided chunk is an implementation of the `ArchExtras` trait for the `CPU` struct, which includes methods for reading and writing the return address on the stack.", "metadata": {"chunk_id": "doc_8_chunk_2", "original_index": 2, "pid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_2"}, "type": "Document"}
+{"page_content": " fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let reg_id = match idx {\n 0 => Regs::Rdi,\n 1 => Regs::Rsi,\n 2 => Regs::Rdx,\n 3 => Regs::Rcx,\n 4 => Regs::R8,\n 5 => Regs::R9,\n r => return Err(format!(\"Unsupported argument: {r:}\")),\n };\n\n self.read_reg(reg_id)\n }\n\n\n\nThe provided chunk is a part of the `ArchExtras` trait implementation for the `CPU` struct, specifically the `read_function_argument` method. 
This method is responsible for reading function arguments based on the provided calling convention (Cdecl) and argument index.", "metadata": {"chunk_id": "doc_8_chunk_3", "original_index": 3, "pid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_3"}, "type": "Document"}
+{"page_content": " fn write_function_argument<T>(\n &self,\n conv: CallingConvention,\n idx: i32,\n val: T,\n ) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let val: GuestReg = val.into();\n match idx {\n 0 => self.write_reg(Regs::Rdi, val),\n 1 => self.write_reg(Regs::Rsi, val),\n _ => Err(format!(\"Unsupported argument: {idx:}\")),\n }\n }\n}\n\n\nThe provided chunk is a part of the `ArchExtras` trait implementation for the `CPU` struct, specifically the `write_function_argument` method. This method is responsible for writing function arguments to the appropriate registers based on the Cdecl calling convention.", "metadata": {"chunk_id": "doc_8_chunk_4", "original_index": 4, "pid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_4"}, "type": "Document"}
+{"page_content": "//! Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods\n//!\n\n//use ahash::AHasher;\n//use core::hash::Hasher;\n\nuse alloc::{rc::Rc, string::String, vec::Vec};\nuse core::cell::RefCell;\nuse std::hash::{Hash, Hasher};\n\nuse grammartec::{\n newtypes::NodeID,\n rule::RuleIDOrCustom,\n tree::{Tree, TreeLike},\n};\nuse libafl_bolts::HasLen;\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n generators::nautilus::NautilusContext,\n inputs::{BytesInput, Input, InputConverter},\n Error,\n};\n\n/// An [`Input`] implementation for `Nautilus` grammar.\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct NautilusInput {\n /// The input representation as Tree\n pub tree: Tree,\n}\n\n\n\nThe chunk represents the definition of the `NautilusInput` struct, which is an implementation of the `Input` trait for the Nautilus grammar fuzzer. It includes the necessary imports, the `NautilusInput` struct definition, and related methods.", "metadata": {"chunk_id": "doc_9_chunk_0", "original_index": 0, "pid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_0"}, "type": "Document"}
+{"page_content": "impl Input for NautilusInput {\n /// Generate a name for this input\n #[must_use]\n fn generate_name(&self, idx: usize) -> String {\n /*let mut hasher = AHasher::new_with_keys(0, 0);\n for term in &self.terms {\n hasher.write(term.symbol.as_bytes());\n }\n format!(\"{:016x}\", hasher.finish())*/\n format!(\"id:{idx}\")\n }\n}\n\n/// Rc Ref-cell from Input\nimpl From<NautilusInput> for Rc<RefCell<NautilusInput>> {\n fn from(input: NautilusInput) -> Self {\n Rc::new(RefCell::new(input))\n }\n}\n\nimpl HasLen for NautilusInput {\n #[inline]\n fn len(&self) -> usize {\n self.tree.size()\n }\n}\n\nimpl NautilusInput {\n /// Creates a new codes input using the given terminals\n #[must_use]\n pub fn new(tree: Tree) -> Self {\n Self { tree }\n }\n\n\n\nThe provided chunk contains the implementation of the `NautilusInput` struct, which is an `Input` implementation for the `Nautilus` grammar fuzzer. It includes methods for generating a name for the input, converting the input to an `Rc<RefCell<NautilusInput>>`, and calculating the length of the input. Additionally, it provides a constructor for creating a new `NautilusInput` instance.", "metadata": {"chunk_id": "doc_9_chunk_1", "original_index": 1, "pid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_1"}, "type": "Document"}
+{"page_content": " /// Create an empty [`Input`]\n #[must_use]\n pub fn empty() -> Self {\n Self {\n tree: Tree {\n rules: vec![],\n sizes: vec![],\n paren: vec![],\n },\n }\n }\n\n /// Generate a `Nautilus` input from the given bytes\n pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec<u8>) {\n bytes.clear();\n self.tree.unparse(NodeID::from(0), &context.ctx, bytes);\n }\n\n\n\nThe provided chunk is part of the implementation of the `NautilusInput` struct, which is an `Input` implementation for the `Nautilus` grammar fuzzer. The chunk includes methods for creating an empty `NautilusInput` and generating a `Nautilus` input from the given bytes.", "metadata": {"chunk_id": "doc_9_chunk_2", "original_index": 2, "pid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_2"}, "type": "Document"}
+{"page_content": " /// Get the tree representation of this input\n #[must_use]\n pub fn tree(&self) -> &Tree {\n &self.tree\n }\n\n /// Get the tree representation of this input, as a mutable reference\n #[must_use]\n pub fn tree_mut(&mut self) -> &mut Tree {\n &mut self.tree\n }\n}\n\nimpl Hash for NautilusInput {\n fn hash<H: Hasher>(&self, state: &mut H) {\n self.tree().paren.hash(state);\n for r in &self.tree().rules {\n match r {\n RuleIDOrCustom::Custom(a, b) => {\n a.hash(state);\n b.hash(state);\n }\n RuleIDOrCustom::Rule(a) => a.hash(state),\n }\n }\n self.tree().sizes.hash(state);\n }\n}\n\n\n\nThe provided chunk contains methods related to the `NautilusInput` struct, which is an implementation of the `Input` trait for the Nautilus grammar fuzzer. The chunk includes methods to access the tree representation of the input, both as a read-only and mutable reference, as well as a method to implement the `Hash` trait for the `NautilusInput` struct.", "metadata": {"chunk_id": "doc_9_chunk_3", "original_index": 3, "pid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_3"}, "type": "Document"}
+{"page_content": "/// `InputConverter` to convert from `NautilusInput` to `BytesInput`\n#[derive(Debug)]\npub struct NautilusToBytesInputConverter<'a> {\n ctx: &'a NautilusContext,\n}\n\nimpl<'a> NautilusToBytesInputConverter<'a> {\n #[must_use]\n /// Create a new `NautilusToBytesInputConverter` from a context\n pub fn new(ctx: &'a NautilusContext) -> Self {\n Self { ctx }\n }\n}\n\nimpl<'a> InputConverter for NautilusToBytesInputConverter<'a> {\n type From = NautilusInput;\n type To = BytesInput;\n\n fn convert(&mut self, input: Self::From) -> Result<Self::To, Error> {\n let mut bytes = vec![];\n input.unparse(self.ctx, &mut bytes);\n Ok(BytesInput::new(bytes))\n }\n}\n\n\nThe provided chunk defines a struct `NautilusToBytesInputConverter` and its associated methods, which is an `InputConverter` implementation to convert from `NautilusInput` to `BytesInput`. 
This is part of the implementation for the `Nautilus` grammar fuzzer methods.", "metadata": {"chunk_id": "doc_9_chunk_4", "original_index": 4, "pid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_4"}, "type": "Document"} +{"page_content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n\n\nThis chunk defines a Logger struct and its associated initialization function, which is used to set up logging functionality in the application.", "metadata": {"chunk_id": "doc_10_chunk_0", "original_index": 0, "pid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2_0"}, "type": "Document"} +{"page_content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n\n\nThe provided chunk is the beginning of the `Registry` class definition within a larger Python module. The `Registry` class is responsible for managing the registration and retrieval of various types within the module.", "metadata": {"chunk_id": "doc_11_chunk_0", "original_index": 0, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_0"}, "type": "Document"} +{"page_content": " _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n\n\nThe chunk represents the class-level attributes and a method of the `Registry` class, which is responsible for managing the registration of various types within the Ciphey framework.", "metadata": {"chunk_id": "doc_11_chunk_1", "original_index": 1, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_1"}, "type": "Document"} +{"page_content": " def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n\n\nThe provided chunk is a part of the `_real_register` method of the `Registry` class, which is responsible for registering input types with the 
registry.", "metadata": {"chunk_id": "doc_11_chunk_2", "original_index": 2, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_2"}, "type": "Document"} +{"page_content": " # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n\n\nThe provided chunk is part of the `_real_register` method of the `Registry` class, which is responsible for registering a new input type with the registry. The chunk is specifically responsible for determining the module type of the input type, which is used to properly register the type within the registry.", "metadata": {"chunk_id": "doc_11_chunk_3", "original_index": 3, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_3"}, "type": "Document"} +{"page_content": " continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n\n\nThis chunk is part of the `_real_register` method of the `Registry` class, which is responsible for registering a new input type with the registry. The code in this chunk is determining the module type of the input type, and ensuring that it is derived from a single registrable base class.", "metadata": {"chunk_id": "doc_11_chunk_4", "original_index": 4, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_4"}, "type": "Document"} +{"page_content": " # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n\n\nThis chunk is part of the `_real_register` method of the `Registry` class, which is responsible for registering a new input type with the registry. 
The code in this chunk handles the case where the input type is a subclass of the `Checker` class, and it replaces the input type with a `PolymorphicChecker` if necessary, and updates the registry's internal data structures accordingly.", "metadata": {"chunk_id": "doc_11_chunk_5", "original_index": 5, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_5"}, "type": "Document"} +{"page_content": " # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n\n\nThis chunk is part of the `_real_register` method of the `Registry` class, which is responsible for registering input types with the registry. It handles the difference between the `register` and `register_multi` methods, determining the appropriate module base and arguments based on the provided input type and arguments.", "metadata": {"chunk_id": "doc_11_chunk_6", "original_index": 6, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_6"}, "type": "Document"} +{"page_content": " self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n\n\nThe provided chunk is part of the `Registry` class in the Ciphey project. 
It contains methods for registering and retrieving various types of objects, such as Checkers, Crackers, Decoders, and others, within the Ciphey framework.", "metadata": {"chunk_id": "doc_11_chunk_7", "original_index": 7, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_7"}, "type": "Document"} +{"page_content": " target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n\n\nThe chunk is part of the `Registry` class, which is responsible for managing the registration and retrieval of various types within the Ciphey framework.", "metadata": {"chunk_id": "doc_11_chunk_8", "original_index": 8, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_8"}, "type": "Document"} +{"page_content": " def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n\n\nThe provided chunk is part of the implementation of the `Registry` class, which is responsible for managing the registration and retrieval of various types within the Ciphey project. 
The `get_targeted()` method retrieves the types associated with a specific target, the `get_all_names()` method returns a list of all registered names, and the `__str__()` method provides a string representation of the registry.", "metadata": {"chunk_id": "doc_11_chunk_9", "original_index": 9, "pid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_9"}, "type": "Document"} +{"page_content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n\n\nThe chunk represents the implementation of the `decode` method of the `Octal` class, which is a decoder for performing octal decoding on the input ciphertext.", "metadata": {"chunk_id": "doc_12_chunk_0", "original_index": 0, "pid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_0"}, "type": "Document"} +{"page_content": " return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n\n\nThe provided chunk is the core implementation of the Octal decoder class, which is responsible for decoding octal-encoded text. It includes the main `decode()` method, as well as the class-level methods for setting the priority, initializing the class, and defining the target decoding type.", "metadata": {"chunk_id": "doc_12_chunk_1", "original_index": 1, "pid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_1"}, "type": "Document"} +{"page_content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n\n\nThe provided chunk is a part of the implementation of the A1Z26 decoder class within a larger Python module. 
The class is responsible for decoding text using the A1Z26 cipher, which maps each letter of the alphabet to its corresponding number (A=1, B=2, ..., Z=26).", "metadata": {"chunk_id": "doc_13_chunk_0", "original_index": 0, "pid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_0"}, "type": "Document"} +{"page_content": " f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n\n\nThe provided chunk is the implementation of the `decode` method of the `A1z26` class, which is a decoder for the A1Z26 cipher. The class is registered in the `registry` and is responsible for decoding the ciphertext by converting the numeric values to their corresponding alphabetic characters.", "metadata": {"chunk_id": "doc_13_chunk_1", "original_index": 1, "pid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_1"}, "type": "Document"} +{"page_content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n\n\nThis chunk defines a Base58 (Ripple) decoder class within a Python module.", "metadata": {"chunk_id": "doc_14_chunk_0", "original_index": 0, "pid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad_0"}, "type": "Document"} +{"page_content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Morse_code(Decoder[str]):\n # A priority list for char/word boundaries\n BOUNDARIES = {\" \": 1, \"/\": 2, \"\\n\": 3}\n PURGE = {ord(c): None for c in BOUNDARIES.keys()}\n MAX_PRIORITY = 3\n ALLOWED = {\".\", \"-\", \" \", \"/\", \"\\n\"}\n MORSE_CODE_DICT: Dict[str, str]\n MORSE_CODE_DICT_INV: Dict[str, str]\n\n def decode(self, ctext: T) -> Optional[U]:\n logging.debug(\"Attempting Morse code decoder\")\n\n char_boundary = word_boundary = None\n\n\n\nThis chunk contains the implementation of the Morse_code class, which is a decoder for Morse code. 
It is part of a larger document that likely contains other classes and functionality related to decoding and processing text.", "metadata": {"chunk_id": "doc_15_chunk_0", "original_index": 0, "pid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_0"}, "type": "Document"} +{"page_content": " char_boundary = word_boundary = None\n char_priority = word_priority = 0\n # Custom loop allows early break\n for i in ctext:\n i_priority = self.BOUNDARIES.get(i)\n if i_priority is None:\n if i in self.ALLOWED:\n continue\n logging.debug(f\"Non-morse char '{i}' found\")\n return None\n\n\n\nThe chunk is part of the `decode` method of the `Morse_code` class, which is responsible for decoding a given ciphertext that is assumed to be in Morse code.", "metadata": {"chunk_id": "doc_15_chunk_1", "original_index": 1, "pid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_1"}, "type": "Document"} +{"page_content": " if i_priority <= char_priority or i == char_boundary or i == word_boundary:\n continue\n # Default to having a char boundary over a word boundary\n if (\n i_priority > word_priority\n and word_boundary is None\n and char_boundary is not None\n ):\n word_priority = i_priority\n word_boundary = i\n continue\n char_priority = i_priority\n char_boundary = i\n\n logging.debug(\n f\"Char boundary is unicode {ord(char_boundary)}, and word boundary is unicode {ord(word_boundary) if word_boundary is not None else None}\"\n )\n\n result = \"\"\n\n\n\nThe chunk is part of the `decode` method of the `Morse_code` class, which is responsible for decoding Morse code text. The code in the chunk is responsible for identifying the character and word boundaries in the input text, and then using those boundaries to decode the Morse code.", "metadata": {"chunk_id": "doc_15_chunk_2", "original_index": 2, "pid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_2"}, "type": "Document"} +{"page_content": " for word in ctext.split(word_boundary) if word_boundary else [ctext]:\n logging.debug(f\"Attempting to decode word {word}\")\n for char in word.split(char_boundary):\n char = char.translate(self.PURGE)\n if len(char) == 0:\n continue\n try:\n m = self.MORSE_CODE_DICT_INV[char]\n except KeyError:\n logging.debug(f\"Invalid codeword '{char}' found\")\n return None\n result = result + m\n # after every word add a space\n result = result + \" \"\n if len(result) == 0:\n logging.debug(\"Morse code failed to match\")\n return None\n # Remove trailing space\n result = result[:-1]\n logging.info(f\"Morse code successful, returning {result}\")\n return result.strip().upper()\n\n\n\nThis chunk is the core of the `decode` method of the `Morse_code` class, which is responsible for decoding a given ciphertext that is assumed to be in Morse code format.", "metadata": {"chunk_id": "doc_15_chunk_3", "original_index": 3, "pid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_3"}, "type": "Document"} +{"page_content": " @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.MORSE_CODE_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.MORSE_CODE_DICT_INV = {v: k for k, v in self.MORSE_CODE_DICT.items()}\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The morse code dictionary to use\",\n req=False,\n default=\"cipheydists::translate::morse\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return 
\"morse_code\"\n\n\nThis chunk defines the class-level methods and attributes for the Morse_code decoder class, including the priority, initialization, parameter specification, and target name.", "metadata": {"chunk_id": "doc_15_chunk_4", "original_index": 4, "pid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_4"}, "type": "Document"} +{"page_content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n\n\nThis chunk defines a Soundex cracker class within a larger document that likely contains other cracker classes for different ciphers or encoding schemes. The Soundex cracker class is registered with the registry and provides methods for getting information about the cracker, the target it handles, and attempting to crack Soundex-encoded text.", "metadata": {"chunk_id": "doc_16_chunk_0", "original_index": 0, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_0"}, "type": "Document"} +{"page_content": " def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n\n\nThe provided chunk is the `attemptCrack` method of the `Soundex` class, which is a cracker implementation for the Soundex algorithm. This method attempts to crack the Soundex-encoded text by generating all possible combinations and finding the corresponding words and sentences.", "metadata": {"chunk_id": "doc_16_chunk_1", "original_index": 1, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_1"}, "type": "Document"} +{"page_content": " # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n\n\nThe provided chunk is part of the `attemptCrack` method of the `Soundex` class, which is a cracker for the Soundex algorithm. 
The chunk is responsible for processing the input ciphertext, ensuring it meets the Soundex format requirements, and generating a list of possible words and sentences based on the Soundex codes.", "metadata": {"chunk_id": "doc_16_chunk_2", "original_index": 2, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_2"}, "type": "Document"} +{"page_content": " sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n\n\nThe provided chunk is part of the `attemptCrack` method of the `Soundex` class, which is a cracker for the Soundex algorithm. The chunk is responsible for sorting the generated sentences based on the frequency of the words in each sentence and returning the sorted results.", "metadata": {"chunk_id": "doc_16_chunk_3", "original_index": 3, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_3"}, "type": "Document"} +{"page_content": " def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n\n\nThe provided chunk is a part of the `getSentenceCombo` method within the `Soundex` class, which is a cracker for the Soundex algorithm. This method is responsible for generating all possible sentences from a given set of Soundex codes by recursively combining the words that correspond to each code.", "metadata": {"chunk_id": "doc_16_chunk_4", "original_index": 4, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_4"}, "type": "Document"} +{"page_content": " for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n\n\nThe provided chunk is a part of the `Soundex` class, which is a cracker for the Soundex algorithm. 
The chunk includes the `getSentenceCombo` method, which recursively generates all possible sentences from a given set of Soundex codes, and the `getParams` and `__init__` methods, which handle the configuration and resource loading for the Soundex cracker.", "metadata": {"chunk_id": "doc_16_chunk_5", "original_index": 5, "pid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_5"}, "type": "Document"} +{"page_content": "# by https://github.com/RustyDucky and https://github.com/lukasgabriel\n\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Tap_code(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Tap code decoding\n \"\"\"\n try:\n result = \"\"\n combinations = ctext.split(\" \")\n for fragment in combinations:\n result += self.TABLE.get(fragment)\n return result\n except Exception:\n return None\n\n\n\nThis chunk contains the implementation of a Tap code decoder, which is a type of cipher that uses a grid of letters to encode and decode messages.", "metadata": {"chunk_id": "doc_17_chunk_0", "original_index": 0, "pid": "44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3_0"}, "type": "Document"} +{"page_content": " @staticmethod\n def priority() -> float:\n return 0.06\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.TABLE = config.get_resource(self._params()[\"dict\"], Translation)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The table of letters used for the tap code interpretation.\",\n req=False,\n default=\"cipheydists::translate::tap_code\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"tap_code\"\n\n\nThe chunk represents the implementation of the Tap Code decoder class, including its priority, initialization, parameter specification, and target identification.", "metadata": {"chunk_id": "doc_17_chunk_1", "original_index": 1, "pid": "44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3_1"}, "type": "Document"} +{"page_content": "from functools import lru_cache\nfrom typing import Any, Dict, Optional, Set\n\nimport cipheydists\nimport logging\n\nfrom ciphey.iface import (\n Config,\n Distribution,\n ParamSpec,\n ResourceLoader,\n Translation,\n WordList,\n registry,\n)\n\n\n@registry.register_multi(WordList, Distribution, Translation)\nclass CipheyDists(ResourceLoader):\n # _wordlists: Set[str] = frozenset({\"english\", \"english1000\", \"englishStopWords\"})\n # _brandons: Set[str] = frozenset({\"english\"})\n # _dists: Set[str] = frozenset({\"twist\"})\n # _translates: Set[str] = frozenset({\"morse\"})\n _getters = {\n \"list\": cipheydists.get_list,\n \"dist\": cipheydists.get_dist,\n \"brandon\": cipheydists.get_brandon,\n \"translate\": cipheydists.get_translate,\n }\n\n\n\nThis chunk defines a class called `CipheyDists` that inherits from the `ResourceLoader` class. 
The class is responsible for loading various resources such as word lists, distributions, and translations used by the Ciphey cryptanalysis tool.", "metadata": {"chunk_id": "doc_18_chunk_0", "original_index": 0, "pid": "bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2_0"}, "type": "Document"} +{"page_content": " def whatResources(self) -> Optional[Set[str]]:\n pass\n\n @lru_cache()\n def getResource(self, name: str) -> Any:\n logging.debug(f\"Loading cipheydists resource {name}\")\n prefix, name = name.split(\"::\", 1)\n return self._getters[prefix](name)\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n\nThe provided chunk is a part of the implementation of the `CipheyDists` class, which is a resource loader for the Ciphey project. The class is responsible for loading various resources such as word lists, distributions, and translations from the `cipheydists` library.", "metadata": {"chunk_id": "doc_18_chunk_1", "original_index": 1, "pid": "bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2_1"}, "type": "Document"} +{"page_content": "# Translated to Python and adapted for Ciphey from the JS original at https://github.com/pshihn/base69\n\n\nimport re\nfrom math import ceil\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, WordList, registry\n\n\n@registry.register\nclass Base69(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base69 decoding\n \"\"\"\n # Remove whitespace\n try:\n ctext = re.sub(r\"\\s+\", \"\", ctext, flags=re.UNICODE)\n extra_bytes = 0\n clen = len(ctext)\n\n if ctext[:-1] == \"=\":\n extra_bytes = int(ctext[clen - 2])\n\n CHUNK_COUNT = ceil(clen / 16)\n result = [0 for _ in range(CHUNK_COUNT * 7 - extra_bytes)]\n\n\n\nThe chunk is the implementation of the Base69 decoder class, which is responsible for decoding Base69-encoded text. It is part of a larger document that includes the full implementation of the Base69 decoder.", "metadata": {"chunk_id": "doc_19_chunk_0", "original_index": 0, "pid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_0"}, "type": "Document"} +{"page_content": " for i in range(CHUNK_COUNT):\n chunk_string = ctext[i * 16 : (i + 1) * 16]\n if extra_bytes and (i == CHUNK_COUNT - 1):\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert[0 : 7 - extra_bytes]):\n result[n + i * 7] = elem\n else:\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert):\n result[n + i * 7] = elem % 256\n return bytearray(result).decode().strip(\"\\x00\")\n except Exception:\n return None\n\n\n\nThe provided chunk is part of the `decode` method of the `Base69` class, which is responsible for performing Base69 decoding on the input ciphertext. The chunk iterates through the input ciphertext, decoding each chunk and storing the results in the `result` list. 
The final decoded text is then returned as a string.", "metadata": {"chunk_id": "doc_19_chunk_1", "original_index": 1, "pid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_1"}, "type": "Document"} +{"page_content": " def decode_chunk(self, s: str):\n padded_bytes = s.endswith(\"=\")\n\n decoded = [0 for _ in range(8)]\n for i in range(8):\n decoded[i] = (\n 0\n if i == 7 and padded_bytes\n else self.chars_to_byte(s[i * 2 : i * 2 + 2])\n )\n\n result = [0 for _ in range(7)]\n for i in range(7):\n t1 = decoded[i] << (i + 1)\n t2 = decoded[i + 1] >> (7 - i - 1)\n result[i] = t1 | t2\n return result\n\n\n\nThe `decode_chunk` function is a helper function within the `Base69` class, which is a Ciphey decoder that performs Base69 decoding on the provided ciphertext.", "metadata": {"chunk_id": "doc_19_chunk_2", "original_index": 2, "pid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_2"}, "type": "Document"} +{"page_content": " def chars_to_byte(self, s: str):\n return (69 * self.CHARS.index(s[1])) + (self.CHARS.index(s[0]))\n\n @staticmethod\n def priority() -> float:\n # If this becomes lower or equal to the reverse, it breaks.\n # So I'll set it to 0.2 for now since it is very fast anyways.\n return 0.2\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.CHARS = config.get_resource(self._params()[\"dict\"], WordList)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The charset used for the decoder.\",\n req=False,\n default=\"cipheydists::list::base69\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"base69\"\n\n\nThe provided chunk contains the implementation of the `chars_to_byte` method, the `priority` method, the `__init__` method, the `getParams` method, and the `getTarget` method for the `Base69` class, which is a Decoder class in the Ciphey project that performs Base69 decoding.", "metadata": {"chunk_id": "doc_19_chunk_3", "original_index": 3, "pid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_3"}, "type": "Document"} +{"page_content": "import pytest\n\nfrom ciphey import decrypt\nfrom ciphey.iface import Config\n\nanswer_str = \"Hello my name is bee and I like dog and apple and tree\"\n\n\ndef test_a1z26():\n res = decrypt(\n Config().library_default().complete_config(),\n \"8 5 12 12 15 13 25 14 1 13 5 9 19 2 5 5 1 14 4 9 12 9 11 5 4 15 7 1 14 4 1 16 16 12 5 1 14 4 20 18 5 5\",\n )\n assert res == \"hellomynameisbeeandilikedogandappleandtree\"\n\n\n\nThis chunk contains unit tests for the Ciphey library, specifically testing the decryption functionality for various ciphers and encodings.", "metadata": {"chunk_id": "doc_20_chunk_0", "original_index": 0, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_0"}, "type": "Document"} +{"page_content": "\ndef test_affine():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Ihsst bf kxbh rd ghh xky R srjh ytz xky xccsh xky muhh\",\n )\n assert res == answer_str\n\n\ndef test_ascii_shift():\n res = decrypt(\n Config().library_default().complete_config(),\n '\"?FFIzGSzH;G?zCMzz#zFCE?z>IAz;H>z;JJF?z;H>zNL??',\n )\n assert res == answer_str\n\n\ndef test_atbash():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Svool nb mznv rh yvv zmw R orpv wlt zmw zkkov zmw givv\",\n )\n assert res == answer_str\n\n\n\nThis chunk contains tests for the affine, ASCII shift, and atbash ciphers within the larger document of tests for various 
ciphers and decryption methods.", "metadata": {"chunk_id": "doc_20_chunk_1", "original_index": 1, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_1"}, "type": "Document"} +{"page_content": "\ndef test_baconian_complete_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABB ABABB ABBBA ABBAA BBAAA ABBAB AAAAA ABBAA AABAA ABAAA BAABA AAAAB AABAA AABAA AAAAA ABBAB AAABB ABAAA ABABB ABAAA ABABA AABAA AAABB ABBBA AABBA AAAAA ABBAB AAABB AAAAA ABBBB ABBBB ABABB AABAA AAAAA ABBAB AAABB BAABB BAAAB AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n\n\nThe chunk is a test function that checks the decryption of a Baconian complete variant cipher, which is one of the many cipher tests included in the overall document.", "metadata": {"chunk_id": "doc_20_chunk_2", "original_index": 2, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_2"}, "type": "Document"} +{"page_content": "\ndef test_baconian_standard_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABA ABABA ABBAB ABABB BABBA ABBAA AAAAA ABABB AABAA ABAAA BAAAB AAAAB AABAA AABAA AAAAA ABBAA AAABB ABAAA ABABA ABAAA ABAAB AABAA AAABB ABBAB AABBA AAAAA ABBAA AAABB AAAAA ABBBA ABBBA ABABA AABAA AAAAA ABBAA AAABB BAABA BAAAA AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n\n\nThis chunk tests the decryption of a message encoded using the Baconian standard variant cipher, which is one of the many cipher types tested in this document.", "metadata": {"chunk_id": "doc_20_chunk_3", "original_index": 3, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_3"}, "type": "Document"} +{"page_content": "\ndef test_base32():\n res = decrypt(\n Config().library_default().complete_config(),\n \"JBSWY3DPEBWXSIDOMFWWKIDJOMQGEZLFEBQW4ZBAJEQGY2LLMUQGI33HEBQW4ZBAMFYHA3DFEBQW4ZBAORZGKZI=\",\n )\n assert res == answer_str\n\n\ndef test_base58_bitcoin():\n res = decrypt(\n Config().library_default().complete_config(),\n \"6qYhNwsP46Mn4gy6gyANfsMm2icAxGFA6gnFjVm9phYHeby7PZm3vthiXxSU77teQgTFGbHETn\",\n )\n assert res == answer_str\n\n\ndef test_base58_ripple():\n res = decrypt(\n Config().library_default().complete_config(),\n \"aqY64A1PhaM8hgyagyw4C1Mmp5cwxGEwag8EjVm9F6YHebyfPZmsvt65XxS7ffteQgTEGbHNT8\",\n )\n assert res == answer_str\n\n\n\nThis chunk tests the decryption of various base encoding ciphers, including Base32, Base58 Bitcoin, and Base58 Ripple.", "metadata": {"chunk_id": "doc_20_chunk_4", "original_index": 4, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_4"}, "type": "Document"} +{"page_content": "\ndef test_base62():\n res = decrypt(\n Config().library_default().complete_config(),\n \"2mQvnz9Yevvb7DRCuyDltsP31vJLToR5pjE9orWkzHMUsht2kbC96PLbZ1sdIocsGHENrzC2n\",\n )\n assert res == answer_str\n\n\ndef test_base64():\n res = decrypt(\n Config().library_default().complete_config(),\n \"SGVsbG8gbXkgbmFtZSBpcyBiZWUgYW5kIEkgbGlrZSBkb2cgYW5kIGFwcGxlIGFuZCB0cmVl\",\n )\n\n assert res == answer_str\n\n\n\nThe chunk tests the decryption of base62 and base64 encoded text using the Ciphey library.", "metadata": {"chunk_id": "doc_20_chunk_5", "original_index": 5, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_5"}, "type": "Document"} +{"page_content": "\ndef test_base69():\n res = decrypt(\n Config().library_default().complete_config(),\n 
\"kAZAtABBeB8A-AoB8ADBNAhBLA1AFBgA0AXBfBGATAVAFBgAwAWBHBu7ARt\",\n )\n assert res == answer_str\n\n\ndef test_base91():\n res = decrypt(\n Config().library_default().complete_config(),\n \">OwJh>=/fV@$x88j9ZNKB*ge$yV%lE%ZKi,+<]>-.-[+>-----<]>++.+++++++..+++.+[+>++<]>.[++>+<]>---.--[+++>-<]>.-[+>++++<]>.[++>+<]>--.-[+++>++<]>-.+[-->---<]>.--------.[+++++>+<]>+.-[+++>--<]>-.++++++++++.---[+>++<]>.[+++>-<]>++.+++..[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.-[+++>+<]>--.-[+>----<]>.++[+++>--<]>.---.++.------.[+++++>+<]>+.+[+>---<]>+.+++++++++++.--------.-[+++>-<]>--.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[+++>-<]>+.-[-->---<]>..----.-------.[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[++>+<]>++++.--.-------------..\",\n )\n assert res == answer_str\n\n\n\nThe chunk is a test function that checks the decryption of a Braille-encoded message and a Brainfuck-encoded message, and asserts that the decrypted result matches the expected answer string.", "metadata": {"chunk_id": "doc_20_chunk_10", "original_index": 10, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_10"}, "type": "Document"} +{"page_content": "\ndef test_brandon():\n res = decrypt(\n Config().library_default().complete_config(),\n \"R hvv blf tzgsvi yvuliv nv...sfmtib...gviirurvw... Xofgxsrmt blfi yzyvh gl blfi yivzhg. Vnkvili Vnsbi srh nzixsvw srh ovtrlmh rmgl lfi ozmwh... Ozrw hrvtv gl vevib uligivhh uiln sviv gl gsv Yofv Nlfmgzrmh. Izyrw zmw izevmlfh, sv yrgvh zmw yrgvh zdzb. Nvm lu gsv Mligs, blf hgzmw zg gsv kivxrkrxv. Blfi prmth szev uzrovw blf, hl mld blf gfim gl gsv tlwh! Zmw bvg blf wl mlg kovzw? Blf wl mlg pmvvo gl wfhg blfi svzwh drgs zhs? Rmhgvzw blf dzro, Dsb szev gsv tlwh ulihzpvm fh? Dv nfhg ollp rmgl gsv girzoh dv uzrovw olmt ztl! Rm z grnv kzhhvw, \n\nThe chunk is a test function that decrypts a string using the \"brandon\" cipher. It is part of a larger document that contains multiple test functions for various encryption/decryption algorithms.", "metadata": {"chunk_id": "doc_20_chunk_11", "original_index": 11, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_11"}, "type": "Document"} +{"page_content": "lfi dliow rmgvigdrmvw drgs zmlgsvi gsilfts zm fksvzezo hxslozih xzoo gsv Xlmqfmxgrlm lu gsv Hksvivh... Gsv tlwh zooldvw fmslob ulixvh gl hork rmgl lfi wlnzrm. Gsv luuhkirmt lu gszg xzgzxobhn dzh gsv mvuvirlfh ulixv xzoovw nztrx... Bvg dv wrw mlg yzmrhs rg, rmhgvzw hgfwbrmt gsv erov zixzmv uli lfi kldvi zmw dvzogs! Zmw gsv nlmhgvih zg lfi wlli...gsv fmslob ivorxgh lu gsrh Xlmqfmxgrlm? ...gsv gilooh...gsv xlikhv vzgvih...gsv dvivdloevh? Wrw dv izrhv lfi hdliwh ztzrmhg gsvn? Li szev dv ozrw gsrh yfiwvm lm lgsvih? Lm hl-xzoovw drgxsvih? Hgizb xsrowivm gzftsg gsv dzbh lu ulfo hlixvib, \n\nThe chunk appears to be a passage discussing the Conjunction of the Spheres, a cataclysmic event that allowed unholy forces to slip into the domain, and the resulting monsters and dark magic that plagued the land.", "metadata": {"chunk_id": "doc_20_chunk_12", "original_index": 12, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_12"}, "type": "Document"} +{"page_content": "gsvri ylwrvh nfgzgvw gsilfts yozhksvnlfh irgfzo. Hvmg gl urtsg nlmhgvih gslfts gsvb xlfow mlg wrhgrmtfrhs tllw uiln vero. Gsv uorxpvi lu sfnzmrgb olmt vcgrmtfrhsvw drgsrm gsvn. Bvh, gsvri mfnyvih szev wdrmwovw gsilfts gsv bvzih. Yfg z uvd hgroo ilzn lfi ozmwh, luuvirmt gsvri yollwb dlip uli xlrm. Gl gsrh wzb gsvb hsznv fh drgs gsvri evib vcrhgvmxv! 
Gsv Mligs yovvwh, uolttvw yb dzi. Gsv yzggovh ziv gsv tlwh' dsrk, xszhgrhvnvmg uli lfi hrmh! Zmw ovg fh mlg ulitvg gsv gviilih, gsv hxlfitvh uiln yvblmw lfi dliow! Gsv Drow Sfmg irwvh gsv hpb drgs vevib ufoo nllm! Gsv wzip izrwvih zywfxg lfi xsrowivm rmgl ozmwh fmpmldm! Hlnv hzb gsvb svizow z hvxlmw Xlmqfmxgrlm! Xzm dv xszig z xlfihv yzxp rmgl gsv ortsg? Droo dv urmw gsv hgivmtgs gl yzmrhs gsv nztvh uiln lfi prmtwlnh? Fmrgv zilfmw gsv dzings lu gsv Vgvimzo Uriv? Mrts rh gsv Grnv lu gsv Hdliw zmw gsv Zcv! Mlmv droo urtsg gsrh dzi rm lfi hgvzw! Mrts rh gsv Grnv lu Nzwmvhh zmw Wrhwzrm!\",\n )\n assert bool(res) is True\n\n\n\nThis chunk appears to be a passage describing the dire state of the world, with references to monsters, dark forces, and the need to unite against them. It seems to be part of a larger narrative or story.", "metadata": {"chunk_id": "doc_20_chunk_13", "original_index": 13, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_13"}, "type": "Document"} +{"page_content": "\ndef test_caesar():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Uryyb zl anzr vf orr naq V yvxr qbt naq nccyr naq gerr\",\n )\n assert res == answer_str\n\n\ndef test_decimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"72 101 108 108 111 32 109 121 32 110 97 109 101 32 105 115 32 98 101 101 32 97 110 100 32 73 32 108 105 107 101 32 100 111 103 32 97 110 100 32 97 112 112 108 101 32 97 110 100 32 116 114 101 101\",\n )\n assert res == answer_str\n\n\ndef test_dna():\n res = decrypt(\n Config().library_default().complete_config(),\n \"GAT AAT GCT ATT TCT ATT AAT ACT GAA CGT GAA TCT ACT ATT AAT GGT\",\n )\n assert res == \"DNAISINTERESTING\"\n\n\ndef test_dtmf():\n res = decrypt(\n Config().library_default().complete_config(),\n\n\nThis chunk contains tests for various decryption methods, including Caesar cipher, decimal, DNA, and DTMF decryption. 
It is part of a larger document that appears to be a test suite for the Ciphey library, which is a tool for automatically detecting and decrypting encrypted text.", "metadata": {"chunk_id": "doc_20_chunk_14", "original_index": 14, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_14"}, "type": "Document"} +{"page_content": " \"1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 
1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697\",\n\nThe chunk is a test case for the DTMF (Dual-Tone Multi-Frequency) decryption function in the Ciphey library. It tests the decryption of a string of DTMF tones representing the phrase \"Hello my name is bee and I like dog and apple and tree\".", "metadata": {"chunk_id": "doc_20_chunk_15", "original_index": 15, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_15"}, "type": "Document"} +{"page_content": "\n )\n assert res == answer_str\n\n\ndef test_galactic():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᒷꖎꖎ𝙹 ᒲ|| リᔑᒲᒷ ╎ᓭ ʖᒷᒷ ᔑリ↸ i ꖎ╎ꖌᒷ ↸𝙹⊣ ᔑリ↸ ᔑ!¡!¡ꖎᒷ ᔑリ↸ ℸ ̣ ∷ᒷᒷ\",\n )\n assert res == answer_str.lower()\n\n\ndef test_galactic_Xproblem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᔑꖎ╎⎓ᔑ ̇/, ̇/||ꖎ𝙹!¡⍑𝙹リᒷ, ᔑ ̇/ ᔑꖎ𝙹リᒷ ᔑリ↸ ̇/ᒷ∷𝙹 ̇/ ⎓∷𝙹ᒲ 𝙹 ̇/⎓𝙹∷↸\",\n )\n assert res == \"halifax, xylophone, a x alone and xerox from oxford\"\n\n\ndef test_gzip():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H4sIAAzul18A/yXJzQmAMBSEwVa+ckwZT7LIw80P6sXuA3ocZpM9aC89msibXSJ6peA8RR3Hx5jTfzyXtAAbQvCyNgAAAA==\",\n )\n assert res == answer_str\n\n\n\nThe chunk is a set of unit tests for the `decrypt` function in the Ciphey library, which is a tool for automatically decrypting encrypted text. The tests cover a wide range of cipher types, including base64, Vigenère, and Morse code, among others.", "metadata": {"chunk_id": "doc_20_chunk_16", "original_index": 16, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_16"}, "type": "Document"} +{"page_content": "\ndef test_hexadecimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"48 65 6c 6c 6f 20 6d 79 20 6e 61 6d 65 20 69 73 20 62 65 65 20 61 6e 64 20 49 20 6c 69 6b 65 20 64 6f 67 20 61 6e 64 20 61 70 70 6c 65 20 61 6e 64 20 74 72 65 65\",\n )\n\n assert res == answer_str\n\n\ndef test_json_problem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"0110100001100101011011000110110001101111\",\n )\n assert res != \"0110100001100101011011000110110001101111\"\n\n\ndef test_leetspeak():\n res = decrypt(\n Config().library_default().complete_config(),\n \"|-|3ll0 my n4m3 1s 833 4nd 1 l1k3 D06 4ND 4ppl3 4nd 7R33\",\n )\n assert res.lower() == answer_str.lower()\n\n\n\nThis chunk contains tests for the hexadecimal, JSON, and leetspeak decryption functions within the Ciphey library.", "metadata": {"chunk_id": "doc_20_chunk_17", "original_index": 17, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_17"}, "type": "Document"} +{"page_content": "\ndef test_morse_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \".... . .-.. .-.. ---/-- -.--/-. .- -- ./.. .../-... . ./.- -. -../../.-.. .. -.- ./-.. --- --./.- -. -../.- .--. .--. .-.. ./.- -. -../- .-. . 
.\",\n )\n assert res == answer_str.upper()\n\n\ndef test_multi_tap():\n res = decrypt(\n Config().library_default().complete_config(),\n \"44 33 555 555 666 0 6 999 0 66 2 6 33 0 444 7777 0 22 33 33 0 2 66 3 0 444 0 555 444 55 33 0 3 666 4 0 2 66 3 0 2 7 7 555 33 0 2 66 3 0 8 777 33 33\",\n )\n assert res == answer_str.upper()\n\n\n\nThe chunk contains tests for the Morse code and multi-tap cipher decoders in the Ciphey library.", "metadata": {"chunk_id": "doc_20_chunk_18", "original_index": 18, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_18"}, "type": "Document"} +{"page_content": "\ndef test_new_line_at_start_returns():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"\\npass\\n\")\n\n assert res == \"\\npass\\n\"\n\n\ndef test_new_line_strip_and_return():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"pass\\n\")\n\n assert res == \"pass\\n\"\n\n\ndef test_octal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"110 145 154 154 157 40 155 171 40 156 141 155 145 40 151 163 40 142 145 145 40 141 156 144 40 111 40 154 151 153 145 40 144 157 147 40 141 156 144 40 141 160 160 154 145 40 141 156 144 40 164 162 145 145\",\n )\n assert res == answer_str\n\n\n\nThe chunk tests the behavior of the decrypt function when the input starts or ends with a newline character, as well as testing the decryption of octal-encoded text.", "metadata": {"chunk_id": "doc_20_chunk_19", "original_index": 19, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_19"}, "type": "Document"} +{"page_content": "\ndef test_plaintext():\n res = decrypt(Config().library_default().complete_config(), answer_str)\n assert res == answer_str\n\n\ndef test_quadgrams_messed_up_spacing():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\",\n )\n assert (\n res == \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\"\n )\n\n\ndef test_quadgrams_no_spaces():\n res = decrypt(\n Config().library_default().complete_config(),\n \"HellomynameisbeeandIlikedogandappleandtree\",\n )\n assert res == \"HellomynameisbeeandIlikedogandappleandtree\"\n\n\n\nThis chunk contains tests for the plaintext and quadgrams decryption methods in the Ciphey library. 
The tests check that the decryption functions correctly handle various input formats, including plaintext, messed up spacing, and no spaces.", "metadata": {"chunk_id": "doc_20_chunk_20", "original_index": 20, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_20"}, "type": "Document"} +{"page_content": "\ndef test_quadgrams_space_between_every_letter():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\",\n )\n assert (\n res\n == \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\"\n )\n\n\ndef test_reversed_text():\n res = decrypt(\n Config().library_default().complete_config(),\n \"eert dna elppa dna god ekil I dna eeb si eman ym olleH\",\n )\n assert res == answer_str\n\n\n\nThe provided chunk contains two test functions that verify the decryption of reversed text and text with spaces between every letter using the Ciphey library. These tests are part of a larger suite of tests that cover various cipher and encoding types.", "metadata": {"chunk_id": "doc_20_chunk_21", "original_index": 21, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_21"}, "type": "Document"} +{"page_content": "\ndef test_rot47():\n res = decrypt(\n Config().library_default().complete_config(),\n \"$A9:?I @7 3=24< BF2CEK[ ;F586 >J G@H\",\n )\n assert res == \"Sphinx of black quartz, judge my vow\"\n\n\ndef test_soundex():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H236 I200 I500 T000 P230\",\n )\n assert res.lower() == \"history is in the past\"\n\n\ndef test_tap_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \"4,4 1,5 4,3 4,4 3,4 3,3 1,5 4,4 5,2 3,4 4,4 2,3 4,2 1,5 1,5\",\n )\n assert res == \"test one two three\".upper()\n\n\ndef test_url():\n res = decrypt(\n Config().library_default().complete_config(),\n \"https%3A%2F%2Fwww%2Egoogle%2Ecom%2Fsearch%3Fq%3Dciphey\",\n )\n assert res == \"https://www.google.com/search?q=ciphey\"\n\n\n\nThis chunk contains tests for various decryption methods, including ROT47, Soundex, Tap Code, and URL decoding. These tests are part of a larger set of tests for the Ciphey library, which is a tool for automatically detecting and decrypting encrypted text.", "metadata": {"chunk_id": "doc_20_chunk_22", "original_index": 22, "pid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_22"}, "type": "Document"} +{"page_content": "\ndef test_uuencode():\n res = decrypt(\n Config().library_default().complete_config(),\n 'begin 644 /dev/stdout\\nM2&5L;&\\\\@;7D@;F%M92!I\n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\n\n\nThis chunk contains functions that generate various types of data, including numbers, booleans, and fixed-length strings, for use in testing or other purposes. 
It is part of a larger document that likely includes additional data generation functions for other data types.", "metadata": {"chunk_id": "doc_21_chunk_0", "original_index": 0, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_0"}, "type": "Document"} +{"page_content": "std::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n\n\nThe provided chunk is a function named `MakeStrings()` that returns a vector of strings. This function is part of a larger C++ file that includes various value generator functions for different data types used in the Clickhouse database.", "metadata": {"chunk_id": "doc_21_chunk_1", "original_index": 1, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_1"}, "type": "Document"} +{"page_content": " \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n };\n}\n\n\n\nThe chunk is part of the `MakeStrings()` function, which generates a vector of strings for testing purposes. 
The function includes several long strings to test how the system handles large string values.", "metadata": {"chunk_id": "doc_21_chunk_2", "original_index": 2, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_2"}, "type": "Document"} +{"page_content": "std::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n\n\nThe provided chunk contains two functions, `MakeUUIDs()` and `MakeDateTime64s()`, which are part of a larger namespace that includes various value generator functions for different data types used in the Clickhouse database.", "metadata": {"chunk_id": "doc_21_chunk_3", "original_index": 3, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_3"}, "type": "Document"} +{"page_content": " // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n\n\nThe provided chunk is part of a C++ code that generates various data types for testing purposes, including DateTime64, Date32, and other numeric and string types. The chunk specifically focuses on the implementation of the `MakeDateTime64s` and `MakeDates32` functions, which generate vectors of DateTime64 and Date32 values, respectively.", "metadata": {"chunk_id": "doc_21_chunk_4", "original_index": 4, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_4"}, "type": "Document"} +{"page_content": " // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\n\n\nThe chunk is part of a function that generates a vector of `int32_t` values representing dates in the Clickhouse Date32 format. 
The function also generates negative values for dates before the epoch, as Clickhouse supports pre-epoch dates.", "metadata": {"chunk_id": "doc_21_chunk_5", "original_index": 5, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_5"}, "type": "Document"} +{"page_content": "std::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n result.reserve(vals.size());\n\n\n\nThe provided chunk contains two functions, `MakeInt128s()` and `MakeDecimals()`, which are part of a larger set of value generator functions in the `value_generators.h` header file. These functions are responsible for generating sample data of various types, including 128-bit integers and decimal values, for testing and other purposes.", "metadata": {"chunk_id": "doc_21_chunk_6", "original_index": 6, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_6"}, "type": "Document"} +{"page_content": " std::transform(vals.begin(), vals.end(), std::back_inserter(result), [scale_multiplier, rhs_value](const auto& value) {\n return value * scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\n\n\nThe provided chunk contains two functions: `MakeDecimals` and `FooBarGenerator`. The `MakeDecimals` function generates a vector of `clickhouse::Int128` values based on a given scale, while the `FooBarGenerator` function generates a string based on the input index.", "metadata": {"chunk_id": "doc_21_chunk_7", "original_index": 7, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_7"}, "type": "Document"} +{"page_content": "std::vector MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n\n\nThe provided chunk contains functions that generate sample IPv4 and IPv6 addresses for testing purposes. 
These functions are part of a larger codebase that includes various value generators for different data types used in the Clickhouse database.", "metadata": {"chunk_id": "doc_21_chunk_8", "original_index": 8, "pid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_8"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n\n\nThe provided chunk represents the implementation of the `ColumnTuple` class, which is part of the `clickhouse` namespace. This class represents a column of tuples, where each tuple is composed of one or more columns. The chunk includes the class definition, constructor, and several member functions related to managing the column data.", "metadata": {"chunk_id": "doc_22_chunk_0", "original_index": 0, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_0"}, "type": "Document"} +{"page_content": " /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n\n\nThe provided chunk represents the public member functions of the `ColumnTuple` class, which is part of the `clickhouse` namespace. These functions are responsible for loading, saving, and manipulating the data within the column.", "metadata": {"chunk_id": "doc_22_chunk_1", "original_index": 1, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_1"}, "type": "Document"} +{"page_content": "private:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n\n\nThe provided chunk represents the private member variables and a nested class definition within the `ColumnTuple` class, which is part of the `clickhouse` namespace. 
The `ColumnTupleT` class is a template-based implementation of the `ColumnTuple` class, providing a more type-safe interface for working with tuples of columns.", "metadata": {"chunk_id": "doc_22_chunk_2", "original_index": 2, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_2"}, "type": "Document"} +{"page_content": " ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n\n\nThe provided chunk is part of the implementation of the `ColumnTupleT` class, which is a specialized version of the `ColumnTuple` class that represents a column of tuples. The chunk includes the constructors, member functions, and helper methods that allow for the manipulation and access of the tuple data within the column.", "metadata": {"chunk_id": "doc_22_chunk_3", "original_index": 3, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_3"}, "type": "Document"} +{"page_content": " /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n\n\nThe chunk is a static method within the ColumnTupleT class, which is a specialized version of the ColumnTuple class in the clickhouse namespace. The method is responsible for creating a ColumnTupleT object from a ColumnTuple object, without copying the data and offsets, but by \"stealing\" them from the input ColumnTuple object.", "metadata": {"chunk_id": "doc_22_chunk_4", "original_index": 4, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_4"}, "type": "Document"} +{"page_content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\n\n\nThe provided chunk is part of the implementation of the `ColumnTupleT` class, which is a specialized version of the `ColumnTuple` class that represents a column of tuples. 
The chunk includes static helper methods for wrapping and manipulating `ColumnTuple` objects, as well as overridden methods for slicing, cloning, and swapping the column.", "metadata": {"chunk_id": "doc_22_chunk_5", "original_index": 5, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_5"}, "type": "Document"} +{"page_content": "private:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n\n\nThe provided chunk is a private member function named `AppendTuple` within the `ColumnTupleT` class, which is a template class that represents a column of a Tuple([T]) in the clickhouse namespace.", "metadata": {"chunk_id": "doc_22_chunk_6", "original_index": 6, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_6"}, "type": "Document"} +{"page_content": " template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n\n\nThe provided chunk is a part of the `ColumnTupleT` class, which is a template class that represents a column of a Tuple([T]) in the clickhouse namespace. The `TupleToVector` function is a helper function that converts a tuple to a vector of `ColumnRef` objects.", "metadata": {"chunk_id": "doc_22_chunk_7", "original_index": 7, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_7"}, "type": "Document"} +{"page_content": " template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n\n\nThe provided chunk is a part of the `ColumnTupleT` class, which is a template class that represents a column of Tuple([T]). The `VectorToTuple` function is a static helper function that converts a vector of `ColumnRef` objects into a tuple of shared pointers to the corresponding column types.", "metadata": {"chunk_id": "doc_22_chunk_8", "original_index": 8, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_8"}, "type": "Document"} +{"page_content": " template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n\n\nThe provided chunk is a part of the implementation of the `ColumnTupleT` class, which is a template class that represents a column of Tuple([T]) in the Clickhouse namespace. 
The chunk includes a recursive function `GetTupleOfValues` that retrieves the values of the tuple at a given index, and a member variable `typed_columns_` that stores the individual columns of the tuple.", "metadata": {"chunk_id": "doc_22_chunk_9", "original_index": 9, "pid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_9"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n\n\nThe provided chunk is the definition of the `ColumnIPv4` class within the `clickhouse` namespace. This class is responsible for handling IPv4 addresses within the Clickhouse database system.", "metadata": {"chunk_id": "doc_23_chunk_0", "original_index": 0, "pid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_0"}, "type": "Document"} +{"page_content": " /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n\n\nThe provided chunk is part of the implementation of the `ColumnIPv4` class, which is a column type in the `clickhouse` namespace that represents IPv4 addresses. The chunk includes methods for appending IPv4 addresses to the column, retrieving elements at a given row, and converting the values to strings, as well as methods for managing the column's capacity and appending content from other columns.", "metadata": {"chunk_id": "doc_23_chunk_1", "original_index": 1, "pid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_1"}, "type": "Document"} +{"page_content": " /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n\n\nThe provided chunk is part of the implementation of the `ColumnIPv4` class, which is a column type in the `clickhouse` namespace. 
The chunk contains the method declarations for loading and saving column data, clearing the column, getting the size of the column, creating a slice of the column, cloning an empty column, swapping columns, and getting an item view of a specific row in the column.", "metadata": {"chunk_id": "doc_23_chunk_2", "original_index": 2, "pid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_2"}, "type": "Document"} +{"page_content": "#include \"type_parser.h\"\n\n#include \"clickhouse/exceptions.h\"\n#include \"clickhouse/base/platform.h\" // for _win_\n\n#include \n#include \n#include \n#include \n#include \n\n#if defined _win_\n#include \n#else\n#include \n#endif\n\n\nnamespace clickhouse {\n\nbool TypeAst::operator==(const TypeAst & other) const {\n return meta == other.meta\n && code == other.code\n && name == other.name\n && value == other.value\n && std::equal(elements.begin(), elements.end(), other.elements.begin(), other.elements.end());\n}\n\n\n\nThe provided chunk is the beginning of the `clickhouse` namespace in the `type_parser.h` file. It includes the necessary header files, defines the `TypeAst` class, and sets up the initial namespace.", "metadata": {"chunk_id": "doc_24_chunk_0", "original_index": 0, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_0"}, "type": "Document"} +{"page_content": "static const std::unordered_map<std::string, Type::Code> kTypeCode = {\n { \"Void\", Type::Void },\n { \"Int8\", Type::Int8 },\n { \"Int16\", Type::Int16 },\n { \"Int32\", Type::Int32 },\n { \"Int64\", Type::Int64 },\n { \"Bool\", Type::UInt8 },\n { \"UInt8\", Type::UInt8 },\n { \"UInt16\", Type::UInt16 },\n { \"UInt32\", Type::UInt32 },\n { \"UInt64\", Type::UInt64 },\n { \"Float32\", Type::Float32 },\n { \"Float64\", Type::Float64 },\n { \"String\", Type::String },\n { \"FixedString\", Type::FixedString },\n { \"DateTime\", Type::DateTime },\n { \"DateTime64\", Type::DateTime64 },\n\n\nThe chunk represents a static unordered map that maps string representations of ClickHouse data types to their corresponding Type::Code values. This map is used to translate between the string representation and the internal representation of the data types.", "metadata": {"chunk_id": "doc_24_chunk_1", "original_index": 1, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_1"}, "type": "Document"} +{"page_content": " { \"Date\", Type::Date },\n { \"Date32\", Type::Date32 },\n { \"Array\", Type::Array },\n { \"Nullable\", Type::Nullable },\n { \"Tuple\", Type::Tuple },\n { \"Enum8\", Type::Enum8 },\n { \"Enum16\", Type::Enum16 },\n { \"UUID\", Type::UUID },\n { \"IPv4\", Type::IPv4 },\n { \"IPv6\", Type::IPv6 },\n { \"Int128\", Type::Int128 },\n// { \"UInt128\", Type::UInt128 },\n { \"Decimal\", Type::Decimal },\n { \"Decimal32\", Type::Decimal32 },\n { \"Decimal64\", Type::Decimal64 },\n { \"Decimal128\", Type::Decimal128 },\n { \"LowCardinality\", Type::LowCardinality },\n { \"Map\", Type::Map },\n { \"Point\", Type::Point },\n { \"Ring\", Type::Ring },\n { \"Polygon\", Type::Polygon },\n { \"MultiPolygon\", Type::MultiPolygon },\n};\n\n\n\nThe chunk is a part of the static initialization of a map that maps type names to their corresponding type codes in the ClickHouse database. 
This map is used by the TypeParser class to parse and recognize various data types supported by ClickHouse.", "metadata": {"chunk_id": "doc_24_chunk_2", "original_index": 2, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_2"}, "type": "Document"} +{"page_content": "template <typename L, typename R>\ninline int CompateStringsCaseInsensitive(const L& left, const R& right) {\n int64_t size_diff = left.size() - right.size();\n if (size_diff != 0)\n return size_diff > 0 ? 1 : -1;\n\n#if defined _win_\n return _strnicmp(left.data(), right.data(), left.size());\n#else\n return strncasecmp(left.data(), right.data(), left.size());\n#endif\n}\n\nstatic Type::Code GetTypeCode(const std::string& name) {\n auto it = kTypeCode.find(name);\n if (it != kTypeCode.end()) {\n return it->second;\n }\n\n return Type::Void;\n}\n\nstatic TypeAst::Meta GetTypeMeta(const StringView& name) {\n if (name == \"Array\") {\n return TypeAst::Array;\n }\n\n\n\nThe provided chunk contains utility functions for parsing and handling ClickHouse data types. The `CompateStringsCaseInsensitive` function compares two strings case-insensitively, while the `GetTypeCode` and `GetTypeMeta` functions map type names to their corresponding codes and metadata, respectively.", "metadata": {"chunk_id": "doc_24_chunk_3", "original_index": 3, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_3"}, "type": "Document"} +{"page_content": " if (name == \"Null\") {\n return TypeAst::Null;\n }\n\n if (name == \"Nullable\") {\n return TypeAst::Nullable;\n }\n\n if (name == \"Tuple\") {\n return TypeAst::Tuple;\n }\n\n if (name == \"Enum8\" || name == \"Enum16\") {\n return TypeAst::Enum;\n }\n\n if (name == \"LowCardinality\") {\n return TypeAst::LowCardinality;\n }\n\n if (name == \"SimpleAggregateFunction\") {\n return TypeAst::SimpleAggregateFunction;\n }\n\n if (name == \"Map\") {\n return TypeAst::Map;\n }\n\n return TypeAst::Terminal;\n}\n\n\n\nThe provided chunk is a part of the `GetTypeMeta` function, which is responsible for determining the type metadata based on the input string. This function is used within the `TypeParser` class to parse and analyze the structure of a ClickHouse data type.", "metadata": {"chunk_id": "doc_24_chunk_4", "original_index": 4, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_4"}, "type": "Document"} +{"page_content": "bool ValidateAST(const TypeAst& ast) {\n // Void terminal that is not actually \"void\" produced when unknown type is encountered.\n if (ast.meta == TypeAst::Terminal\n && ast.code == Type::Void\n && CompateStringsCaseInsensitive(ast.name, std::string_view(\"void\")) != 0)\n //throw UnimplementedError(\"Unsupported type: \" + ast.name);\n return false;\n\n return true;\n}\n\n\nTypeParser::TypeParser(const StringView& name)\n : cur_(name.data())\n , end_(name.data() + name.size())\n , type_(nullptr)\n{\n}\n\nTypeParser::~TypeParser() = default;\n\nbool TypeParser::Parse(TypeAst* type) {\n type_ = type;\n open_elements_.push(type_);\n\n\n\nThe provided chunk is a part of the `clickhouse` namespace implementation, specifically the `TypeParser` class and the `ValidateAST` function. 
These components are responsible for parsing and validating the abstract syntax tree (AST) representation of ClickHouse data types.", "metadata": {"chunk_id": "doc_24_chunk_5", "original_index": 5, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_5"}, "type": "Document"} +{"page_content": " size_t processed_tokens = 0;\n do {\n const Token & token = NextToken();\n switch (token.type) {\n case Token::QuotedString:\n {\n type_->meta = TypeAst::Terminal;\n if (token.value.length() < 1)\n type_->value_string = {};\n else\n type_->value_string = token.value.substr(1, token.value.length() - 2).to_string();\n\n\nThe provided chunk is part of the `TypeParser::Parse()` function, which is responsible for parsing a type name and constructing a `TypeAst` object representing the parsed type.", "metadata": {"chunk_id": "doc_24_chunk_6", "original_index": 6, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_6"}, "type": "Document"} +{"page_content": " type_->code = Type::String;\n break;\n }\n case Token::Name:\n type_->meta = GetTypeMeta(token.value);\n type_->name = token.value.to_string();\n type_->code = GetTypeCode(type_->name);\n break;\n case Token::Number:\n type_->meta = TypeAst::Number;\n type_->value = std::stol(token.value.to_string());\n break;\n case Token::String:\n type_->meta = TypeAst::String;\n type_->value_string = std::string(token.value);\n\n\nThe provided chunk is part of the `TypeParser::Parse()` function, which is responsible for parsing a type name and constructing a `TypeAst` object representing the parsed type.", "metadata": {"chunk_id": "doc_24_chunk_7", "original_index": 7, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_7"}, "type": "Document"} +{"page_content": " break;\n case Token::LPar:\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::RPar:\n type_ = open_elements_.top();\n open_elements_.pop();\n break;\n case Token::Assign:\n case Token::Comma:\n type_ = open_elements_.top();\n open_elements_.pop();\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::EOS:\n {\n // Ubalanced braces, brackets, etc is an error.\n if (open_elements_.size() != 1)\n return false;\n\n\n\nThe provided chunk is part of the `TypeParser::NextToken()` function, which is responsible for parsing the input string and extracting the various tokens that make up the type definition. 
The chunk handles the logic for processing different types of tokens, such as parentheses, assignments, and commas, and manages the stack of open elements during the parsing process.", "metadata": {"chunk_id": "doc_24_chunk_8", "original_index": 8, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_8"}, "type": "Document"} +{"page_content": " // Empty input string, no tokens produced\n if (processed_tokens == 0)\n return false;\n\n return ValidateAST(*type);\n }\n case Token::Invalid:\n return false;\n }\n ++processed_tokens;\n } while (true);\n}\n\nTypeParser::Token TypeParser::NextToken() {\n for (; cur_ < end_; ++cur_) {\n switch (*cur_) {\n case ' ':\n case '\\n':\n case '\\t':\n case '\\0':\n continue;\n case '=':\n return Token{Token::Assign, StringView(cur_++, 1)};\n case '(':\n return Token{Token::LPar, StringView(cur_++, 1)};\n case ')':\n return Token{Token::RPar, StringView(cur_++, 1)};\n\n\nThe provided chunk is part of the `TypeParser` class implementation, which is responsible for parsing a type name string and constructing a corresponding `TypeAst` object. The chunk includes the end of the `Parse` method and the beginning of the `NextToken` method, which are responsible for tokenizing the input string and building the `TypeAst` representation.", "metadata": {"chunk_id": "doc_24_chunk_9", "original_index": 9, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_9"}, "type": "Document"} +{"page_content": " case ',':\n return Token{Token::Comma, StringView(cur_++, 1)};\n case '\\'':\n {\n const auto end_quote_length = 1;\n const StringView end_quote{cur_, end_quote_length};\n // Fast forward to the closing quote.\n const auto start = cur_++;\n for (; cur_ < end_ - end_quote_length; ++cur_) {\n // TODO (nemkov): handle escaping ?\n if (end_quote == StringView{cur_, end_quote_length}) {\n cur_ += end_quote_length;\n\n\n\nThe chunk is part of the `NextToken()` function in the `TypeParser` class, which is responsible for parsing the input string and extracting the next token from it.", "metadata": {"chunk_id": "doc_24_chunk_10", "original_index": 10, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_10"}, "type": "Document"} +{"page_content": " return Token{Token::QuotedString, StringView{start, cur_}};\n }\n }\n return Token{Token::QuotedString, StringView(cur_++, 1)};\n }\n\n default: {\n const char* st = cur_;\n\n if (*cur_ == '\\'') {\n for (st = ++cur_; cur_ < end_; ++cur_) {\n if (*cur_ == '\\'') {\n return Token{Token::String, StringView(st, cur_++ - st)};\n }\n }\n\n\n\nThe provided chunk is part of the `NextToken()` function in the `TypeParser` class, which is responsible for parsing the input string and extracting the next token from it.", "metadata": {"chunk_id": "doc_24_chunk_11", "original_index": 11, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_11"}, "type": "Document"} +{"page_content": " return Token{Token::Invalid, StringView()};\n }\n\n if (isalpha(*cur_) || *cur_ == '_') {\n for (; cur_ < end_; ++cur_) {\n if (!isalpha(*cur_) && !isdigit(*cur_) && *cur_ != '_') {\n break;\n }\n }\n\n return Token{Token::Name, StringView(st, cur_)};\n }\n\n if (isdigit(*cur_) || *cur_ == '-') {\n for (++cur_; cur_ < end_; ++cur_) {\n if (!isdigit(*cur_)) {\n break;\n }\n }\n\n\n\nThe provided chunk is part of the `NextToken()` function in the `TypeParser` class, which is responsible for parsing the input string and identifying the different tokens (e.g., names, numbers, strings) that make 
up the type definition.", "metadata": {"chunk_id": "doc_24_chunk_12", "original_index": 12, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_12"}, "type": "Document"} +{"page_content": " return Token{Token::Number, StringView(st, cur_)};\n }\n\n return Token{Token::Invalid, StringView()};\n }\n }\n }\n\n return Token{Token::EOS, StringView()};\n}\n\n\nconst TypeAst* ParseTypeName(const std::string& type_name) {\n // Cache for type_name.\n // Usually we won't have too many type names in the cache, so do not try to\n // limit cache size.\n static std::map<std::string, TypeAst> ast_cache;\n static std::mutex lock;\n\n std::lock_guard<std::mutex> guard(lock);\n auto it = ast_cache.find(type_name);\n if (it != ast_cache.end()) {\n return &it->second;\n }\n\n auto& ast = ast_cache[type_name];\n if (TypeParser(type_name).Parse(&ast)) {\n return &ast;\n }\n ast_cache.erase(type_name);\n return nullptr;\n}\n\n}\n\n\nThe chunk of code represents the implementation of the `NextToken()` method in the `TypeParser` class, which is responsible for parsing the input string and extracting the next token. The `ParseTypeName()` function is a utility function that caches the parsed type names to improve performance.", "metadata": {"chunk_id": "doc_24_chunk_13", "original_index": 13, "pid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_13"}, "type": "Document"} +{"page_content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n// * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n// * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n\n\nThis chunk contains the copyright notice and licensing information for the Google C++ Testing and Mocking Framework (Google Test).", "metadata": {"chunk_id": "doc_25_chunk_0", "original_index": 0, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_0"}, "type": "Document"} +{"page_content": "// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n\nThis chunk contains the copyright notice and disclaimer for the Google C++ Testing and Mocking Framework (Google Test).", "metadata": {"chunk_id": "doc_25_chunk_1", "original_index": 1, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_1"}, "type": "Document"} +{"page_content": "//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the public API for death tests. It is\n// #included by gtest.h so a user doesn't need to include this\n// directly.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n#define GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n\n#include \"gtest/internal/gtest-death-test-internal.h\"\n\nnamespace testing {\n\n// This flag controls the style of death tests. Valid values are \"threadsafe\",\n// meaning that the death test child process will re-execute the test binary\n// from the start, running only a single death test, or \"fast\",\n// meaning that the child process will execute the test logic immediately\n// after forking.\nGTEST_DECLARE_string_(death_test_style);\n\n#if GTEST_HAS_DEATH_TEST\n\nnamespace internal {\n\n\n\nThis chunk defines the public API for death tests in the Google C++ Testing and Mocking Framework (Google Test). It includes the declaration of a flag that controls the style of death tests and the definition of the namespace for internal functions related to death tests.", "metadata": {"chunk_id": "doc_25_chunk_2", "original_index": 2, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_2"}, "type": "Document"} +{"page_content": "// Returns a Boolean value indicating whether the caller is currently\n// executing in the context of the death test child process. Tools such as\n// Valgrind heap checkers may need this to modify their behavior in death\n// tests. IMPORTANT: This is an internal utility. Using it may break the\n// implementation of death tests. User code MUST NOT use it.\nGTEST_API_ bool InDeathTestChild();\n\n} // namespace internal\n\n// The following macros are useful for writing death tests.\n\n// Here's what happens when an ASSERT_DEATH* or EXPECT_DEATH* is\n// executed:\n//\n// 1. It generates a warning if there is more than one active\n// thread. This is because it's safe to fork() or clone() only\n// when there is a single thread.\n//\n// 2. The parent process clone()s a sub-process and runs the death\n// test in it; the sub-process exits with code 0 at the end of the\n// death test, if it hasn't exited already.\n//\n\n\nThis chunk describes the internal implementation details of Google Test's death tests, including a function to check if the current process is the death test child process, and an overview of the steps involved in executing a death test.", "metadata": {"chunk_id": "doc_25_chunk_3", "original_index": 3, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_3"}, "type": "Document"} +{"page_content": "// 3. 
The parent process waits for the sub-process to terminate.\n//\n// 4. The parent process checks the exit code and error message of\n// the sub-process.\n//\n// Examples:\n//\n// ASSERT_DEATH(server.SendMessage(56, \"Hello\"), \"Invalid port number\");\n// for (int i = 0; i < 5; i++) {\n// EXPECT_DEATH(server.ProcessRequest(i),\n// \"Invalid request .* in ProcessRequest()\")\n// << \"Failed to die on request \" << i;\n// }\n//\n// ASSERT_EXIT(server.ExitNow(), ::testing::ExitedWithCode(0), \"Exiting\");\n//\n// bool KilledBySIGHUP(int exit_code) {\n// return WIFSIGNALED(exit_code) && WTERMSIG(exit_code) == SIGHUP;\n// }\n//\n// ASSERT_EXIT(client.HangUpServer(), KilledBySIGHUP, \"Hanging up!\");\n//\n// The final parameter to each of these macros is a matcher applied to any data\n\n\nThis chunk describes the behavior of the death test macros provided by the Google C++ Testing and Mocking Framework (Google Test), including examples of how to use them.", "metadata": {"chunk_id": "doc_25_chunk_4", "original_index": 4, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_4"}, "type": "Document"} +{"page_content": "// the sub-process wrote to stderr. For compatibility with existing tests, a\n// bare string is interpreted as a regular expression matcher.\n//\n// On the regular expressions used in death tests:\n//\n// GOOGLETEST_CM0005 DO NOT DELETE\n// On POSIX-compliant systems (*nix), we use the <regex.h> library,\n// which uses the POSIX extended regex syntax.\n//\n// On other platforms (e.g. Windows or Mac), we only support a simple regex\n// syntax implemented as part of Google Test. This limited\n// implementation should be enough most of the time when writing\n// death tests; though it lacks many features you can find in PCRE\n// or POSIX extended regex syntax. For example, we don't support\n\n\nThis chunk provides details on the regular expression syntax supported by Google Test's death tests, including the differences between POSIX-compliant systems and other platforms.", "metadata": {"chunk_id": "doc_25_chunk_5", "original_index": 5, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_5"}, "type": "Document"} +{"page_content": "// union (\"x|y\"), grouping (\"(xy)\"), brackets (\"[xy]\"), and\n// repetition count (\"x{5,7}\"), among others.\n//\n// Below is the syntax that we do support. We chose it to be a\n// subset of both PCRE and POSIX extended regex, so it's easy to\n// learn wherever you come from. In the following: 'A' denotes a\n// literal character, period (.), or a single \\\\ escape sequence;\n// 'x' and 'y' denote regular expressions; 'm' and 'n' are for\n\n\nThis chunk describes the regular expression syntax supported by the Google Test framework for writing death tests.", "metadata": {"chunk_id": "doc_25_chunk_6", "original_index": 6, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_6"}, "type": "Document"} +{"page_content": "// natural numbers.\n//\n// c matches any literal character c\n// \\\\d matches any decimal digit\n// \\\\D matches any character that's not a decimal digit\n// \\\\f matches \\f\n// \\\\n matches \\n\n// \\\\r matches \\r\n// \\\\s matches any ASCII whitespace, including \\n\n// \\\\S matches any character that's not a whitespace\n// \\\\t matches \\t\n// \\\\v matches \\v\n// \\\\w matches any letter, _, or decimal digit\n// \\\\W matches any character that \\\\w doesn't match\n// \\\\c matches any literal character c, which must be a punctuation\n// . 
matches any single character except \\n\n\n\nThis chunk describes the regular expression syntax supported by the Google Test framework for writing death tests.", "metadata": {"chunk_id": "doc_25_chunk_7", "original_index": 7, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_7"}, "type": "Document"} +{"page_content": "// A? matches 0 or 1 occurrences of A\n// A* matches 0 or many occurrences of A\n// A+ matches 1 or many occurrences of A\n// ^ matches the beginning of a string (not that of each line)\n// $ matches the end of a string (not that of each line)\n// xy matches x followed by y\n//\n// If you accidentally use PCRE or POSIX extended regex features\n// not implemented by us, you will get a run-time failure. In that\n\n\nThis chunk describes the regular expression syntax supported by the Google Test framework for writing death tests.", "metadata": {"chunk_id": "doc_25_chunk_8", "original_index": 8, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_8"}, "type": "Document"} +{"page_content": "// case, please try to rewrite your regular expression within the\n// above syntax.\n//\n// This implementation is *not* meant to be as highly tuned or robust\n// as a compiled regex library, but should perform well enough for a\n// death test, which already incurs significant overhead by launching\n// a child process.\n//\n// Known caveats:\n//\n// A \"threadsafe\" style death test obtains the path to the test\n// program from argv[0] and re-executes it in the sub-process. For\n// simplicity, the current implementation doesn't search the PATH\n// when launching the sub-process. This means that the user must\n// invoke the test program via a path that contains at least one\n// path separator (e.g. path/to/foo_test and\n// /absolute/path/to/bar_test are fine, but foo_test is not). This\n// is rarely a problem as people usually don't put the test binary\n// directory in PATH.\n//\n\n\n\nThis chunk provides details about the regular expression syntax supported by the Google Test death test implementation, as well as known caveats regarding the \"threadsafe\" style of death tests.", "metadata": {"chunk_id": "doc_25_chunk_9", "original_index": 9, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_9"}, "type": "Document"} +{"page_content": "// Asserts that a given `statement` causes the program to exit, with an\n// integer exit status that satisfies `predicate`, and emitting error output\n// that matches `matcher`.\n# define ASSERT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_FATAL_FAILURE_)\n\n// Like `ASSERT_EXIT`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_NONFATAL_FAILURE_)\n\n\n\nThis chunk defines macros for writing death tests in the Google C++ Testing and Mocking Framework (Google Test). 
The macros allow asserting that a given statement causes the program to exit with a specific exit status and error output matching a specified pattern.", "metadata": {"chunk_id": "doc_25_chunk_10", "original_index": 10, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_10"}, "type": "Document"} +{"page_content": "// Asserts that a given `statement` causes the program to exit, either by\n// explicitly exiting with a nonzero exit code or being killed by a\n// signal, and emitting error output that matches `matcher`.\n# define ASSERT_DEATH(statement, matcher) \\\n ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Like `ASSERT_DEATH`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_DEATH(statement, matcher) \\\n EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Two predicate classes that can be used in {ASSERT,EXPECT}_EXIT*:\n\n\n\nThis chunk defines the ASSERT_DEATH and EXPECT_DEATH macros, which are used to write death tests in Google Test, a unit testing framework for C++. The chunk also defines two predicate classes that can be used with the ASSERT_EXIT and EXPECT_EXIT macros.", "metadata": {"chunk_id": "doc_25_chunk_11", "original_index": 11, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_11"}, "type": "Document"} +{"page_content": "// Tests that an exit code describes a normal exit with a given exit code.\nclass GTEST_API_ ExitedWithCode {\n public:\n explicit ExitedWithCode(int exit_code);\n ExitedWithCode(const ExitedWithCode&) = default;\n void operator=(const ExitedWithCode& other) = delete;\n bool operator()(int exit_status) const;\n private:\n const int exit_code_;\n};\n\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n// Tests that an exit code describes an exit due to termination by a\n// given signal.\n// GOOGLETEST_CM0006 DO NOT DELETE\nclass GTEST_API_ KilledBySignal {\n public:\n explicit KilledBySignal(int signum);\n bool operator()(int exit_status) const;\n private:\n const int signum_;\n};\n# endif // !GTEST_OS_WINDOWS\n\n\n\nThis chunk defines two classes, `ExitedWithCode` and `KilledBySignal`, which are used as predicate classes in the `ASSERT_EXIT` and `EXPECT_EXIT` macros provided by the Google Test framework. 
These classes are used to test the exit status of a program in death tests.", "metadata": {"chunk_id": "doc_25_chunk_12", "original_index": 12, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_12"}, "type": "Document"} +{"page_content": "// EXPECT_DEBUG_DEATH asserts that the given statements die in debug mode.\n// The death testing framework causes this to have interesting semantics,\n// since the sideeffects of the call are only visible in opt mode, and not\n// in debug mode.\n//\n// In practice, this can be used to test functions that utilize the\n// LOG(DFATAL) macro using the following style:\n//\n// int DieInDebugOr12(int* sideeffect) {\n// if (sideeffect) {\n// *sideeffect = 12;\n// }\n\n\nThe chunk describes the usage and semantics of the `EXPECT_DEBUG_DEATH` macro, which is used to assert that certain statements die in debug mode, while their side-effects are only visible in optimized mode.", "metadata": {"chunk_id": "doc_25_chunk_13", "original_index": 13, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_13"}, "type": "Document"} +{"page_content": "// LOG(DFATAL) << \"death\";\n// return 12;\n// }\n//\n// TEST(TestSuite, TestDieOr12WorksInDgbAndOpt) {\n// int sideeffect = 0;\n// // Only asserts in dbg.\n// EXPECT_DEBUG_DEATH(DieInDebugOr12(&sideeffect), \"death\");\n//\n// #ifdef NDEBUG\n// // opt-mode has sideeffect visible.\n// EXPECT_EQ(12, sideeffect);\n// #else\n// // dbg-mode no visible sideeffect.\n// EXPECT_EQ(0, sideeffect);\n// #endif\n// }\n//\n// This will assert that DieInDebugReturn12InOpt() crashes in debug\n// mode, usually due to a DCHECK or LOG(DFATAL), but returns the\n// appropriate fallback value (12 in this case) in opt mode. If you\n// need to test that a function has appropriate side-effects in opt\n// mode, include assertions against the side-effects. A general\n// pattern for this is:\n//\n// EXPECT_DEBUG_DEATH({\n// // Side-effects here will have an effect after this statement in\n// // opt mode, but none in debug mode.\n// EXPECT_EQ(12, DieInDebugOr12(&sideeffect));\n// }, \"death\");\n//\n# ifdef NDEBUG\n\n\n\nThis chunk provides an example usage of the EXPECT_DEBUG_DEATH macro, which asserts that the given statements die in debug mode. The example demonstrates how to test functions that utilize the LOG(DFATAL) macro, and how to handle side-effects that are only visible in opt mode.", "metadata": {"chunk_id": "doc_25_chunk_14", "original_index": 14, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_14"}, "type": "Document"} +{"page_content": "# define EXPECT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# else\n\n# define EXPECT_DEBUG_DEATH(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n\n# endif // NDEBUG for EXPECT_DEBUG_DEATH\n#endif // GTEST_HAS_DEATH_TEST\n\n\n\nThis chunk defines macros for handling debug death tests in the Google Test framework. 
It provides different implementations of `EXPECT_DEBUG_DEATH` and `ASSERT_DEBUG_DEATH` based on whether the code is compiled in debug or release mode.", "metadata": {"chunk_id": "doc_25_chunk_15", "original_index": 15, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_15"}, "type": "Document"} +{"page_content": "// This macro is used for implementing macros such as\n// EXPECT_DEATH_IF_SUPPORTED and ASSERT_DEATH_IF_SUPPORTED on systems where\n// death tests are not supported. Those macros must compile on such systems\n// if and only if EXPECT_DEATH and ASSERT_DEATH compile with the same parameters\n// on systems that support death tests. This allows one to write such a macro on\n// a system that does not support death tests and be sure that it will compile\n// on a death-test supporting system. It is exposed publicly so that systems\n// that have death-tests with stricter requirements than GTEST_HAS_DEATH_TEST\n\n\nThis chunk describes a macro used to implement macros for death tests on systems that do not support death tests, in order to ensure that the macros compile on both death-test supporting and non-supporting systems.", "metadata": {"chunk_id": "doc_25_chunk_16", "original_index": 16, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_16"}, "type": "Document"} +{"page_content": "// can write their own equivalent of EXPECT_DEATH_IF_SUPPORTED and\n// ASSERT_DEATH_IF_SUPPORTED.\n//\n// Parameters:\n// statement - A statement that a macro such as EXPECT_DEATH would test\n// for program termination. This macro has to make sure this\n// statement is compiled but not executed, to ensure that\n// EXPECT_DEATH_IF_SUPPORTED compiles with a certain\n// parameter if and only if EXPECT_DEATH compiles with it.\n// regex - A regex that a macro such as EXPECT_DEATH would use to test\n\n\nThis chunk describes the parameters used in the GTEST_UNSUPPORTED_DEATH_TEST macro, which is used to implement EXPECT_DEATH_IF_SUPPORTED and ASSERT_DEATH_IF_SUPPORTED macros on systems where death tests are not supported.", "metadata": {"chunk_id": "doc_25_chunk_17", "original_index": 17, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_17"}, "type": "Document"} +{"page_content": "// the output of statement. This parameter has to be\n// compiled but not evaluated by this macro, to ensure that\n// this macro only accepts expressions that a macro such as\n// EXPECT_DEATH would accept.\n// terminator - Must be an empty statement for EXPECT_DEATH_IF_SUPPORTED\n// and a return statement for ASSERT_DEATH_IF_SUPPORTED.\n// This ensures that ASSERT_DEATH_IF_SUPPORTED will not\n// compile inside functions where ASSERT_DEATH doesn't\n// compile.\n//\n// The branch that has an always false condition is used to ensure that\n// statement and regex are compiled (and thus syntactically correct) but\n// never executed. The unreachable code macro protects the terminator\n// statement from generating an 'unreachable code' warning in case\n// statement unconditionally returns or throws. 
The Message constructor at\n\n\nThis chunk describes the parameters and implementation details of the `GTEST_UNSUPPORTED_DEATH_TEST` macro, which is used to provide a fallback implementation for death test macros when death tests are not supported on the platform.", "metadata": {"chunk_id": "doc_25_chunk_18", "original_index": 18, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_18"}, "type": "Document"} +{"page_content": "// the end allows the syntax of streaming additional messages into the\n// macro, for compilational compatibility with EXPECT_DEATH/ASSERT_DEATH.\n# define GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, terminator) \\\n GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n if (::testing::internal::AlwaysTrue()) { \\\n GTEST_LOG_(WARNING) \\\n << \"Death tests are not supported on this platform.\\n\" \\\n << \"Statement '\" #statement \"' cannot be verified.\"; \\\n } else if (::testing::internal::AlwaysFalse()) { \\\n ::testing::internal::RE::PartialMatch(\".*\", (regex)); \\\n GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n terminator; \\\n } else \\\n ::testing::Message()\n\n\n\nThis chunk defines a macro `GTEST_UNSUPPORTED_DEATH_TEST` that provides a fallback implementation for death tests when they are not supported on the platform. It is used by the `EXPECT_DEATH_IF_SUPPORTED` and `ASSERT_DEATH_IF_SUPPORTED` macros to ensure compatibility across platforms.", "metadata": {"chunk_id": "doc_25_chunk_19", "original_index": 19, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_19"}, "type": "Document"} +{"page_content": "// EXPECT_DEATH_IF_SUPPORTED(statement, regex) and\n// ASSERT_DEATH_IF_SUPPORTED(statement, regex) expand to real death tests if\n// death tests are supported; otherwise they just issue a warning. This is\n// useful when you are combining death test assertions with normal test\n// assertions in one test.\n#if GTEST_HAS_DEATH_TEST\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n#else\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, )\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, return)\n#endif\n\n} // namespace testing\n\n#endif // GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n\n\nThis chunk defines macros for handling death tests in the Google C++ Testing and Mocking Framework (Google Test). 
It provides a way to conditionally execute death tests based on whether the platform supports them, and issues a warning if they are not supported.", "metadata": {"chunk_id": "doc_25_chunk_20", "original_index": 20, "pid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_20"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \n#include \n\nnamespace clickhouse {\n\nclass InputStream;\nclass OutputStream;\n\nclass WireFormat {\npublic:\n template <typename T>\n static bool ReadFixed(InputStream& input, T* value);\n static bool ReadString(InputStream& input, std::string* value);\n static bool SkipString(InputStream& input);\n static bool ReadBytes(InputStream& input, void* buf, size_t len);\n static bool ReadUInt64(InputStream& input, uint64_t* value);\n static bool ReadVarint64(InputStream& output, uint64_t* value);\n\n template <typename T>\n static void WriteFixed(OutputStream& output, const T& value);\n static void WriteBytes(OutputStream& output, const void* buf, size_t len);\n static void WriteString(OutputStream& output, std::string_view value);\n static void WriteUInt64(OutputStream& output, const uint64_t value);\n static void WriteVarint64(OutputStream& output, uint64_t value);\n\n\n\nThe provided chunk is the declaration of the `WireFormat` class, which is part of the `clickhouse` namespace. This class provides static methods for reading and writing various data types, such as fixed-size values, strings, and variable-length integers, to and from input and output streams.", "metadata": {"chunk_id": "doc_26_chunk_0", "original_index": 0, "pid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36_0"}, "type": "Document"} +{"page_content": "private:\n static bool ReadAll(InputStream& input, void* buf, size_t len);\n static void WriteAll(OutputStream& output, const void* buf, size_t len);\n};\n\ntemplate <typename T>\ninline bool WireFormat::ReadFixed(InputStream& input, T* value) {\n return ReadAll(input, value, sizeof(T));\n}\n\ninline bool WireFormat::ReadString(InputStream& input, std::string* value) {\n uint64_t len = 0;\n if (ReadVarint64(input, &len)) {\n if (len > 0x00FFFFFFULL) {\n return false;\n }\n value->resize((size_t)len);\n return ReadAll(input, value->data(), (size_t)len);\n }\n\n\n\nThe provided chunk is part of the `WireFormat` class within the `clickhouse` namespace. 
It contains the private member functions `ReadAll` and `WriteAll`, as well as the implementation of the `ReadFixed` and `ReadString` member functions.", "metadata": {"chunk_id": "doc_26_chunk_1", "original_index": 1, "pid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36_1"}, "type": "Document"} +{"page_content": " return false;\n}\n\ninline bool WireFormat::ReadBytes(InputStream& input, void* buf, size_t len) {\n return ReadAll(input, buf, len);\n}\n\ninline bool WireFormat::ReadUInt64(InputStream& input, uint64_t* value) {\n return ReadVarint64(input, value);\n}\n\ntemplate <typename T>\ninline void WireFormat::WriteFixed(OutputStream& output, const T& value) {\n WriteAll(output, &value, sizeof(T));\n}\n\ninline void WireFormat::WriteBytes(OutputStream& output, const void* buf, size_t len) {\n WriteAll(output, buf, len);\n}\n\ninline void WireFormat::WriteString(OutputStream& output, std::string_view value) {\n WriteVarint64(output, value.size());\n WriteAll(output, value.data(), value.size());\n}\n\ninline void WireFormat::WriteUInt64(OutputStream& output, const uint64_t value) {\n WriteVarint64(output, value);\n}\n\n}\n\n\nThe provided chunk is a part of the implementation of the `WireFormat` class, which is responsible for reading and writing various data types to and from input/output streams in a binary format. The chunk includes the implementation of methods for reading and writing fixed-size data, bytes, strings, and unsigned 64-bit integers.", "metadata": {"chunk_id": "doc_26_chunk_2", "original_index": 2, "pid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36_2"}, "type": "Document"} +{"page_content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n\n\nThis chunk of code defines the implementation of the `Column` class in the `clickhouse` namespace, which is likely part of a larger project related to the ClickHouse database. 
The `Column` class provides methods for loading and saving column data to and from input and output streams, respectively.", "metadata": {"chunk_id": "doc_27_chunk_0", "original_index": 0, "pid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d_0"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"nullable.h\"\n\n#include \n#include \n#include \n#include \n\nnamespace clickhouse {\n\ntemplate <typename DictionaryColumnType>\nclass ColumnLowCardinalityT;\n\nnamespace details {\n\n/** LowCardinalityHashKey used as key in unique items hashmap to abstract away key value\n * (type of which depends on dictionary column) and to reduce likelehood of collisions.\n *\n * In order to dramatically reduce collision rate, we use 2 different hashes from 2 different hash functions.\n * First hash is used in hashtable (to calculate item position).\n * Second one is used as part of key value and accessed via `operator==()` upon collision resolution/detection.\n */\nusing LowCardinalityHashKey = std::pair<std::uint64_t, std::uint64_t>;\n\n\n\nThe provided chunk is the beginning of the `clickhouse` namespace in a C++ header file that defines the `ColumnLowCardinality` and `ColumnLowCardinalityT` classes, which are part of the Clickhouse database client library. The chunk includes the necessary include statements and the definition of the `LowCardinalityHashKey` type used within the `ColumnLowCardinality` class.", "metadata": {"chunk_id": "doc_28_chunk_0", "original_index": 0, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_0"}, "type": "Document"} +{"page_content": "struct LowCardinalityHashKeyHash {\n inline std::size_t operator()(const LowCardinalityHashKey &hash_key) const noexcept {\n return hash_key.first;\n }\n};\n\n}\n\n/*\n * LC column contains an \"invisible\" default item at the beginning of the collection. [default, ...]\n * If the nested type is Nullable, it contains a null-item at the beginning and a default item at the second position. [null, default, ...]\n * Null map is not serialized in LC columns. Instead, nulls are tracked by having an index of 0.\n * */\nclass ColumnLowCardinality : public Column {\npublic:\n using UniqueItems = std::unordered_map;\n\n\n\nThe provided chunk is part of the implementation of the `ColumnLowCardinality` class, which is a column type in the `clickhouse` namespace. 
The chunk includes the definition of the `LowCardinalityHashKeyHash` struct, which is used as a hash function for the `UniqueItems` unordered map, and the beginning of the `ColumnLowCardinality` class definition.", "metadata": {"chunk_id": "doc_28_chunk_1", "original_index": 1, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_1"}, "type": "Document"} +{"page_content": " template \n friend class ColumnLowCardinalityT;\n\nprivate:\n // IMPLEMENTATION NOTE: ColumnLowCardinalityT takes reference to underlying dictionary column object,\n // so make sure to NOT change address of the dictionary object (with reset(), swap()) or with anything else.\n ColumnRef dictionary_column_;\n ColumnRef index_column_;\n UniqueItems unique_items_map_;\n\npublic:\n ColumnLowCardinality(ColumnLowCardinality&& col) = default;\n // c-tor makes a deep copy of the dictionary_column.\n explicit ColumnLowCardinality(ColumnRef dictionary_column);\n explicit ColumnLowCardinality(std::shared_ptr dictionary_column);\n\n template \n explicit ColumnLowCardinality(std::shared_ptr> dictionary_column)\n : ColumnLowCardinality(dictionary_column->template As())\n {}\n\n\n\nThe provided chunk is part of the implementation of the `ColumnLowCardinality` class, which is a column type in the `clickhouse` namespace. The chunk includes the private member variables of the class, as well as the constructors that initialize the `dictionary_column_` and `index_column_` members.", "metadata": {"chunk_id": "doc_28_chunk_2", "original_index": 2, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_2"}, "type": "Document"} +{"page_content": " ~ColumnLowCardinality();\n\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends another LowCardinality column to the end of this one, updating dictionary.\n void Append(ColumnRef /*column*/) override;\n\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data.\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n\n\nThe provided chunk is part of the implementation of the `ColumnLowCardinality` class, which is a column type in the `clickhouse` namespace. This class represents a low-cardinality column, which is a column with a limited number of unique values. 
The chunk includes the implementation of various member functions, such as the destructor, capacity management, appending data, loading and saving data, and clearing the column.", "metadata": {"chunk_id": "doc_28_chunk_3", "original_index": 3, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_3"}, "type": "Document"} +{"page_content": " /// Makes slice of current column, with compacted dictionary\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n ItemView GetItem(size_t index) const override;\n\n size_t GetDictionarySize() const;\n TypeRef GetNestedType() const;\n\nprotected:\n std::uint64_t getDictionaryIndex(std::uint64_t item_index) const;\n void appendIndex(std::uint64_t item_index);\n void removeLastIndex();\n ColumnRef GetDictionary();\n\n void AppendUnsafe(const ItemView &);\n\nprivate:\n void Setup(ColumnRef dictionary_column);\n void AppendNullItem();\n void AppendDefaultItem();\n\npublic:\n static details::LowCardinalityHashKey computeHashKey(const ItemView &);\n};\n\n/** Type-aware wrapper that provides simple convenience interface for accessing/appending individual items.\n */\ntemplate \nclass ColumnLowCardinalityT : public ColumnLowCardinality {\n\n\n\nThe provided chunk is part of the implementation of the `ColumnLowCardinality` and `ColumnLowCardinalityT` classes in the `clickhouse` namespace. These classes are responsible for managing low-cardinality columns, which are a type of column that stores data with a limited number of unique values. The chunk includes the method declarations and protected/private member functions that handle the internal operations of these classes.", "metadata": {"chunk_id": "doc_28_chunk_4", "original_index": 4, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_4"}, "type": "Document"} +{"page_content": " DictionaryColumnType& typed_dictionary_;\n const Type::Code type_;\n\npublic:\n using WrappedColumnType = DictionaryColumnType;\n // Type this column takes as argument of Append and returns with At() and operator[]\n using ValueType = typename DictionaryColumnType::ValueType;\n\n explicit ColumnLowCardinalityT(ColumnLowCardinality&& col)\n : ColumnLowCardinality(std::move(col))\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {\n }\n\n template \n explicit ColumnLowCardinalityT(Args &&... args)\n : ColumnLowCardinalityT(std::make_shared(std::forward(args)...))\n {}\n\n\n\nThis chunk defines the member variables and constructor of the `ColumnLowCardinalityT` class, which is a type-aware wrapper around the `ColumnLowCardinality` class. 
It provides a convenient interface for accessing and appending individual items to the column.", "metadata": {"chunk_id": "doc_28_chunk_5", "original_index": 5, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_5"}, "type": "Document"} +{"page_content": " // Create LC column from existing T-column, making a deep copy of all contents.\n explicit ColumnLowCardinalityT(std::shared_ptr dictionary_col)\n : ColumnLowCardinality(dictionary_col)\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {}\n\n /// Extended interface to simplify reading/adding individual items.\n\n /// Returns element at given row number.\n inline ValueType At(size_t n) const {\n return typed_dictionary_.At(getDictionaryIndex(n));\n }\n\n /// Returns element at given row number.\n inline ValueType operator [] (size_t n) const {\n return typed_dictionary_[getDictionaryIndex(n)];\n }\n\n // so the non-virtual Append below doesn't shadow Append() from base class when compiled with older compilers.\n using ColumnLowCardinality::Append;\n\n\n\nThis chunk is part of the implementation of the `ColumnLowCardinalityT` class, which is a type-aware wrapper around the `ColumnLowCardinality` class. It provides a convenient interface for accessing and appending individual items to the low-cardinality column.", "metadata": {"chunk_id": "doc_28_chunk_6", "original_index": 6, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_6"}, "type": "Document"} +{"page_content": " inline void Append(const ValueType & value) {\n if constexpr (IsNullable) {\n if (value.has_value()) {\n AppendUnsafe(ItemView{type_, *value});\n } else {\n AppendUnsafe(ItemView{});\n }\n } else {\n AppendUnsafe(ItemView{type_, value});\n }\n }\n\n template \n inline void AppendMany(const T& container) {\n for (const auto & item : container) {\n Append(item);\n }\n }\n\n\n\nThe provided chunk is part of the `ColumnLowCardinalityT` class, which is a type-aware wrapper around the `ColumnLowCardinality` class. The `Append` and `AppendMany` functions are part of the extended interface provided by `ColumnLowCardinalityT` to simplify reading and adding individual items to the column.", "metadata": {"chunk_id": "doc_28_chunk_7", "original_index": 7, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_7"}, "type": "Document"} +{"page_content": " /** Create a ColumnLowCardinalityT from a ColumnLowCardinality, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnLowCardinality&& col) {\n return std::make_shared>(std::move(col));\n }\n\n\n\nThe provided chunk is a static method within the `ColumnLowCardinalityT` class, which is part of the `clickhouse` namespace. 
This method, named `Wrap`, is used to create a `ColumnLowCardinalityT` object from a `ColumnLowCardinality` object without copying the data and offsets, but by \"stealing\" them from the input object.", "metadata": {"chunk_id": "doc_28_chunk_8", "original_index": 8, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_8"}, "type": "Document"} +{"page_content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnLowCardinality::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnLowCardinality::CloneEmpty()); }\n\nprivate:\n\n template \n static auto GetTypeCode(T& column) {\n if constexpr (IsNullable) {\n return GetTypeCode(*column.Nested()->template AsStrict());\n } else {\n return column.Type()->GetCode();\n }\n }\n};\n\n}\n\n\nThe provided chunk is part of the implementation of the `ColumnLowCardinalityT` class, which is a type-aware wrapper around the `ColumnLowCardinality` class. The chunk includes static helper methods for wrapping and manipulating `ColumnLowCardinality` objects, as well as overridden methods for slicing and cloning the column. The `GetTypeCode` function is a private helper method used to determine the type code of the nested column.", "metadata": {"chunk_id": "doc_28_chunk_9", "original_index": 9, "pid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_9"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate \nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template \n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\n\n\nThe provided chunk is the definition of the `ColumnGeo` class, which is a template class that represents a column of geographic data in the Clickhouse namespace. 
It includes methods for appending, accessing, and manipulating the column data.", "metadata": {"chunk_id": "doc_29_chunk_0", "original_index": 0, "pid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_0"}, "type": "Document"} +{"page_content": "public:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n\n\nThe provided chunk represents the public member functions of the `ColumnGeo` class, which is a template class that serves as the base class for various types of geographic data columns in the Clickhouse database system.", "metadata": {"chunk_id": "doc_29_chunk_1", "original_index": 1, "pid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_1"}, "type": "Document"} +{"page_content": " /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n\n\nThis chunk defines the ColumnGeo class and its derived types (ColumnPoint, ColumnRing, ColumnPolygon, ColumnMultiPolygon) within the clickhouse namespace, which are used to represent various geographic data types in the Clickhouse database.", "metadata": {"chunk_id": "doc_29_chunk_2", "original_index": 2, "pid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_2"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \n#include \n#include \n\nnamespace clickhouse {\n\ntemplate ()(std::declval())),\n typename Value = std::decay_t>\nclass ProjectedIterator {\npublic:\n using value_type = Value;\n using reference = Reference;\n using pointer = Reference;\n using difference_type = typename std::iterator_traits::difference_type;\n using iterator_category = typename std::iterator_traits::iterator_category;\n\n\n\nThe provided chunk is the beginning of a C++ class definition for a `ProjectedIterator` within the `clickhouse` namespace. 
This class is likely used to wrap an existing iterator and apply a unary function to the dereferenced values, projecting them into a new value type.", "metadata": {"chunk_id": "doc_30_chunk_0", "original_index": 0, "pid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd_0"}, "type": "Document"} +{"page_content": " ProjectedIterator() = default;\n\n inline ProjectedIterator(Iterator const& iterator, UnaryFunction functor)\n : iterator_(iterator)\n , functor_(std::move(functor)) {\n }\n\n inline UnaryFunction functor() const { return functor; }\n\n inline Iterator const& base() const { return iterator_; }\n\n inline reference operator*() const { return functor_(iterator_); }\n\n inline ProjectedIterator& operator++() {\n ++iterator_;\n return *this;\n }\n\n inline ProjectedIterator& operator--() {\n --iterator_;\n return *this;\n }\n\n inline bool operator==(const ProjectedIterator& other) const {\n return this->iterator_ == other.iterator_;\n }\n\n inline bool operator!=(const ProjectedIterator& other) const {\n return !(*this == other);\n }\n\nprivate:\n Iterator iterator_;\n UnaryFunction functor_;\n};\n\n} // namespace clickhouse\n\n\nThe provided chunk is the implementation of the `ProjectedIterator` class within the `clickhouse` namespace. This class is used to create an iterator that applies a unary function to the elements of an underlying iterator, effectively projecting the elements into a new form.", "metadata": {"chunk_id": "doc_30_chunk_1", "original_index": 1, "pid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd_1"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n\n\nThis chunk is the source code for a Java enum called `ConsoleOutput` that defines three possible values: `auto`, `rich`, and `plain`. This enum is likely used to control the output format of a console or terminal application.", "metadata": {"chunk_id": "doc_31_chunk_0", "original_index": 0, "pid": "4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8_0"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\n\n\nThe provided chunk is the copyright notice and package declaration for the `UpdateChecker` class, which is part of the `com.google.cloud.tools.jib.plugins.common` package.", "metadata": {"chunk_id": "doc_32_chunk_0", "original_index": 0, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_0"}, "type": "Document"} +{"page_content": "import com.fasterxml.jackson.annotation.JsonIgnoreProperties;\nimport com.google.cloud.tools.jib.api.LogEvent;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.http.Request;\nimport com.google.cloud.tools.jib.http.Response;\nimport com.google.cloud.tools.jib.json.JsonTemplate;\nimport com.google.cloud.tools.jib.json.JsonTemplateMapper;\nimport com.google.cloud.tools.jib.plugins.common.globalconfig.GlobalConfig;\nimport com.google.common.annotations.VisibleForTesting;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardCopyOption;\nimport java.time.Duration;\nimport java.time.Instant;\nimport java.time.format.DateTimeParseException;\nimport java.util.Optional;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Future;\nimport java.util.function.Consumer;\n\n\n\nThe chunk contains the import statements and class definitions used in the `UpdateChecker` class, which is responsible for checking if a tool is up-to-date.", "metadata": {"chunk_id": "doc_32_chunk_1", "original_index": 1, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_1"}, "type": "Document"} +{"page_content": "/** Checks if Jib is up-to-date. */\npublic class UpdateChecker {\n\n private static final String LAST_UPDATE_CHECK_FILENAME = \"lastUpdateCheck\";\n\n /** JSON template for content downloaded during version check. */\n @JsonIgnoreProperties(ignoreUnknown = true)\n private static class VersionJsonTemplate implements JsonTemplate {\n private String latest = \"\";\n }\n\n\n\nThe provided chunk is a part of the `UpdateChecker` class, which is responsible for checking if the Jib tool is up-to-date. The class includes a `VersionJsonTemplate` inner class that represents the JSON template for the content downloaded during the version check.", "metadata": {"chunk_id": "doc_32_chunk_2", "original_index": 2, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_2"}, "type": "Document"} +{"page_content": " /**\n * Begins checking for an update in a separate thread.\n *\n * @param executorService the {@link ExecutorService}\n * @param versionUrl the location to check for the latest version\n * @param toolName the tool name\n * @param toolVersion the tool version\n * @param log {@link Consumer} used to log messages\n * @return a new {@link UpdateChecker}\n */\n public static Future> checkForUpdate(\n ExecutorService executorService,\n String versionUrl,\n String toolName,\n String toolVersion,\n Consumer log) {\n return executorService.submit(\n () ->\n performUpdateCheck(\n GlobalConfig.getConfigDir(), toolVersion, versionUrl, toolName, log));\n }\n\n\n\nThe provided chunk is a part of the `UpdateChecker` class, which is responsible for checking if a tool is up-to-date. 
The `checkForUpdate` method is a static method that initiates the update check in a separate thread.", "metadata": {"chunk_id": "doc_32_chunk_3", "original_index": 3, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_3"}, "type": "Document"} +{"page_content": " @VisibleForTesting\n static Optional performUpdateCheck(\n Path configDir,\n String currentVersion,\n String versionUrl,\n String toolName,\n Consumer log) {\n Path lastUpdateCheck = configDir.resolve(LAST_UPDATE_CHECK_FILENAME);\n\n try {\n // Check time of last update check\n if (Files.exists(lastUpdateCheck)) {\n try {\n String fileContents =\n new String(Files.readAllBytes(lastUpdateCheck), StandardCharsets.UTF_8);\n Instant modifiedTime = Instant.parse(fileContents);\n if (modifiedTime.plus(Duration.ofDays(1)).isAfter(Instant.now())) {\n return Optional.empty();\n }\n } catch (DateTimeParseException | IOException ex) {\n // If reading update time failed, file might be corrupt, so delete it\n log.accept(LogEvent.debug(\"Failed to read lastUpdateCheck; \" + ex.getMessage()));\n Files.delete(lastUpdateCheck);\n }\n }\n\n\n\nThe provided chunk is a part of the `performUpdateCheck` method in the `UpdateChecker` class, which is responsible for checking if a newer version of the tool is available and updating the last update check timestamp.", "metadata": {"chunk_id": "doc_32_chunk_4", "original_index": 4, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_4"}, "type": "Document"} +{"page_content": " // Check for update\n FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n try {\n Response response =\n httpClient.get(\n new URL(versionUrl),\n Request.builder()\n .setHttpTimeout(3000)\n .setUserAgent(\"jib \" + currentVersion + \" \" + toolName)\n .build());\n VersionJsonTemplate version =\n JsonTemplateMapper.readJson(response.getBody(), VersionJsonTemplate.class);\n\n\n\nThe chunk is part of the `performUpdateCheck` method, which is responsible for checking if a newer version of the tool is available by making an HTTP request to a version URL and parsing the response.", "metadata": {"chunk_id": "doc_32_chunk_5", "original_index": 5, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_5"}, "type": "Document"} +{"page_content": " Path lastUpdateCheckTemp =\n Files.createTempFile(configDir, LAST_UPDATE_CHECK_FILENAME, null);\n lastUpdateCheckTemp.toFile().deleteOnExit();\n Files.write(lastUpdateCheckTemp, Instant.now().toString().getBytes(StandardCharsets.UTF_8));\n Files.move(lastUpdateCheckTemp, lastUpdateCheck, StandardCopyOption.REPLACE_EXISTING);\n\n if (currentVersion.equals(version.latest)) {\n return Optional.empty();\n }\n return Optional.of(version.latest);\n } finally {\n httpClient.shutDown();\n }\n\n } catch (IOException ex) {\n log.accept(LogEvent.debug(\"Update check failed; \" + ex.getMessage()));\n }\n\n return Optional.empty();\n }\n\n\n\nThis chunk of code is responsible for checking for updates to the Jib tool. It performs the following tasks:\n\n1. Checks the last time an update check was performed, and skips the check if it was done within the last 24 hours.\n2. Fetches the latest version information from a remote URL.\n3. Writes the current timestamp to a temporary file, and then moves it to the final location.\n4. Compares the current version to the latest version, and returns the latest version if it is different.\n5. 
Handles any exceptions that may occur during the update check process.", "metadata": {"chunk_id": "doc_32_chunk_6", "original_index": 6, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_6"}, "type": "Document"} +{"page_content": " /**\n * Returns the latest Jib version available if the check succeeded and the current version is\n * outdated, or returns {@code Optional.empty()} if the check was interrupted or did not determine\n * that a later version was available.\n *\n * @param updateMessageFuture the {@link Future} returned by {@link UpdateChecker#checkForUpdate}\n * @return the latest version, if found, else {@code Optional.empty()}.\n */\n public static Optional finishUpdateCheck(Future> updateMessageFuture) {\n if (updateMessageFuture.isDone()) {\n try {\n return updateMessageFuture.get();\n } catch (InterruptedException | ExecutionException ex) {\n // No need to restore the interrupted status. The intention here is to silently consume any\n // kind of error\n }\n }\n updateMessageFuture.cancel(true);\n return Optional.empty();\n }\n\n private UpdateChecker() {}\n}\n\n\nThe provided chunk is a part of the `UpdateChecker` class, which is responsible for checking if the Jib tool is up-to-date. The `finishUpdateCheck` method is used to retrieve the latest Jib version available, if the update check was successful and the current version is outdated.", "metadata": {"chunk_id": "doc_32_chunk_7", "original_index": 7, "pid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_7"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n\n\nThe provided chunk is the copyright notice and license information for the Java class `DefaultCredentialRetrieversTest` in the `com.google.cloud.tools.jib.plugins.common` package.", "metadata": {"chunk_id": "doc_33_chunk_0", "original_index": 0, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_0"}, "type": "Document"} +{"page_content": "package com.google.cloud.tools.jib.plugins.common;\n\nimport static com.google.common.truth.Truth.assertThat;\nimport static com.google.common.truth.Truth8.assertThat;\nimport static org.junit.Assert.assertThrows;\nimport static org.mockito.ArgumentMatchers.anyString;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\n\n\nThe provided chunk is the package declaration and import statements for the `DefaultCredentialRetrieversTest` class, which is a test class for the `DefaultCredentialRetrievers` class in the `com.google.cloud.tools.jib.plugins.common` package.", "metadata": {"chunk_id": "doc_33_chunk_1", "original_index": 1, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_1"}, "type": "Document"} +{"page_content": "import com.google.cloud.tools.jib.api.Credential;\nimport com.google.cloud.tools.jib.api.CredentialRetriever;\nimport com.google.cloud.tools.jib.frontend.CredentialRetrieverFactory;\nimport com.google.common.collect.ImmutableMap;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n/** Tests for {@link DefaultCredentialRetrievers}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class DefaultCredentialRetrieversTest {\n\n @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n\n\nThis chunk is part of a test suite for the `DefaultCredentialRetrievers` class, which is responsible for retrieving Docker credentials from various sources.", "metadata": {"chunk_id": "doc_33_chunk_2", "original_index": 2, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_2"}, "type": "Document"} +{"page_content": " @Mock private CredentialRetrieverFactory mockCredentialRetrieverFactory;\n @Mock private CredentialRetriever mockDockerCredentialHelperCredentialRetriever;\n @Mock private CredentialRetriever mockKnownCredentialRetriever;\n @Mock private CredentialRetriever mockInferredCredentialRetriever;\n @Mock private CredentialRetriever mockWellKnownCredentialHelpersCredentialRetriever;\n @Mock private CredentialRetriever mockXdgPrimaryCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvLegacyDockerConfigCredentialRetriever;\n\n\nThis chunk of code defines the mock objects used in the unit tests for the `DefaultCredentialRetrievers` class, which is responsible for retrieving Docker credentials from various sources.", "metadata": {"chunk_id": "doc_33_chunk_3", "original_index": 3, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_3"}, "type": "Document"} +{"page_content": " @Mock private CredentialRetriever mockSystemHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockApplicationDefaultCredentialRetriever;\n\n\n\nThis chunk of code defines several mock CredentialRetriever objects that are used in the DefaultCredentialRetrieversTest class to test the DefaultCredentialRetrievers class.", "metadata": {"chunk_id": "doc_33_chunk_4", "original_index": 4, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_4"}, "type": "Document"} +{"page_content": " private Properties properties;\n private Map environment;\n\n private final Credential knownCredential = Credential.from(\"username\", \"password\");\n private final Credential inferredCredential = Credential.from(\"username2\", \"password2\");\n\n @Before\n public void setUp() {\n properties = new Properties();\n properties.setProperty(\"os.name\", \"unknown\");\n properties.setProperty(\"user.home\", Paths.get(\"/system/home\").toString());\n environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/docker_config\").toString(),\n \"XDG_RUNTIME_DIR\",\n Paths.get(\"/run/user/1000\").toString(),\n \"XDG_CONFIG_HOME\",\n Paths.get(\"/env/home/.config\").toString());\n\n\n\nThis chunk sets up the properties and environment variables used in the tests for 
the `DefaultCredentialRetrievers` class.", "metadata": {"chunk_id": "doc_33_chunk_5", "original_index": 5, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_5"}, "type": "Document"} +{"page_content": " when(mockCredentialRetrieverFactory.dockerCredentialHelper(anyString()))\n .thenReturn(mockDockerCredentialHelperCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(knownCredential, \"credentialSource\"))\n .thenReturn(mockKnownCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(inferredCredential, \"inferredCredentialSource\"))\n .thenReturn(mockInferredCredentialRetriever);\n when(mockCredentialRetrieverFactory.wellKnownCredentialHelpers())\n .thenReturn(mockWellKnownCredentialHelpersCredentialRetriever);\n\n\n\nThis chunk sets up the behavior of the mock `CredentialRetrieverFactory` object, which is used to create various `CredentialRetriever` instances for testing the `DefaultCredentialRetrievers` class.", "metadata": {"chunk_id": "doc_33_chunk_6", "original_index": 6, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_6"}, "type": "Document"} +{"page_content": " when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/run/user/1000/containers/auth.json\")))\n .thenReturn(mockXdgPrimaryCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.config/containers/auth.json\")))\n .thenReturn(mockEnvHomeXdgCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.config/containers/auth.json\")))\n .thenReturn(mockSystemHomeXdgCredentialRetriever);\n\n\n\nThis chunk sets up the behavior of the `CredentialRetrieverFactory` mock to return specific `CredentialRetriever` instances for different file paths related to the XDG configuration directory.", "metadata": {"chunk_id": "doc_33_chunk_7", "original_index": 7, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_7"}, "type": "Document"} +{"page_content": " when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/config.json\")))\n .thenReturn(mockDockerConfigEnvDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/.dockerconfigjson\")))\n .thenReturn(mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(Paths.get(\"/docker_config/.dockercfg\")))\n .thenReturn(mockDockerConfigEnvLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/system/home/.docker/config.json\")))\n .thenReturn(mockSystemHomeDockerConfigCredentialRetriever);\n\n\nThis chunk sets up mock credential retrievers for various Docker configuration files, including the standard Docker configuration file, the Kubernetes Docker configuration file, and the legacy Docker configuration file, both in the system home directory and the environment home directory.", "metadata": {"chunk_id": "doc_33_chunk_8", "original_index": 8, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_8"}, "type": "Document"} +{"page_content": " when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockSystemHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/system/home/.docker/.dockercfg\")))\n .thenReturn(mockSystemHomeLegacyDockerConfigCredentialRetriever);\n\n\nThe chunk is part 
of the setup for the `DefaultCredentialRetrieversTest` class, which tests the `DefaultCredentialRetrievers` class. The chunk sets up mock behavior for the `CredentialRetrieverFactory` to return specific `CredentialRetriever` instances for the system home directory's Docker configuration files.", "metadata": {"chunk_id": "doc_33_chunk_9", "original_index": 9, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_9"}, "type": "Document"} +{"page_content": " when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/env/home/.docker/config.json\")))\n .thenReturn(mockEnvHomeDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockEnvHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/env/home/.docker/.dockercfg\")))\n .thenReturn(mockEnvHomeLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.googleApplicationDefaultCredentials())\n .thenReturn(mockApplicationDefaultCredentialRetriever);\n }\n\n\n\nThis chunk sets up the behavior of the `CredentialRetrieverFactory` mock for various Docker configuration file locations in the user's home directory, as well as the behavior for retrieving Google Application Default Credentials. This is part of the setup for the `DefaultCredentialRetrieversTest` class, which tests the `DefaultCredentialRetrievers` class.", "metadata": {"chunk_id": "doc_33_chunk_10", "original_index": 10, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_10"}, "type": "Document"} +{"page_content": " @Test\n public void testAsList() throws FileNotFoundException {\n List retriever =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retriever)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n\n\nThe provided chunk is a unit test for the `DefaultCredentialRetrievers` class, which is responsible for retrieving Docker credentials from various sources. The test method `testAsList()` verifies that the `asList()` method of `DefaultCredentialRetrievers` returns the expected list of `CredentialRetriever` instances in the correct order.", "metadata": {"chunk_id": "doc_33_chunk_11", "original_index": 11, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_11"}, "type": "Document"} +{"page_content": " mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n\n\nThis chunk represents the list of credential retrievers that are used to retrieve Docker credentials from various locations, including the Docker config file, the Kubernetes Docker config file, and the legacy Docker config file. 
These retrievers are part of the `DefaultCredentialRetrievers` class, which is responsible for managing the credential retrieval process.", "metadata": {"chunk_id": "doc_33_chunk_12", "original_index": 12, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_12"}, "type": "Document"} +{"page_content": " @Test\n public void testAsList_all() throws FileNotFoundException {\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setKnownCredential(knownCredential, \"credentialSource\")\n .setInferredCredential(inferredCredential, \"inferredCredentialSource\")\n .setCredentialHelper(\"credentialHelperSuffix\")\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockKnownCredentialRetriever,\n mockDockerCredentialHelperCredentialRetriever,\n mockInferredCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n\n\nThis chunk tests the `asList()` method of the `DefaultCredentialRetrievers` class, which retrieves a list of `CredentialRetriever` objects based on various configuration settings, including known credentials, inferred credentials, and a credential helper.", "metadata": {"chunk_id": "doc_33_chunk_13", "original_index": 13, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_13"}, "type": "Document"} +{"page_content": " mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n\n\nThis chunk represents the list of credential retrievers that are used to retrieve Docker credentials from various locations, including the Docker configuration files, well-known credential helpers, and the Google Application Default Credentials. 
This list is part of the `asList()` method of the `DefaultCredentialRetrievers` class, which is responsible for providing a prioritized list of credential retrievers to be used for retrieving Docker credentials.", "metadata": {"chunk_id": "doc_33_chunk_14", "original_index": 14, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_14"}, "type": "Document"} +{"page_content": " verify(mockCredentialRetrieverFactory).known(knownCredential, \"credentialSource\");\n verify(mockCredentialRetrieverFactory).known(inferredCredential, \"inferredCredentialSource\");\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(\"docker-credential-credentialHelperSuffix\");\n }\n\n\n\nThe chunk verifies that the `DefaultCredentialRetrievers` class sets the known credential, inferred credential, and credential helper as expected.", "metadata": {"chunk_id": "doc_33_chunk_15", "original_index": 15, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_15"}, "type": "Document"} +{"page_content": " @Test\n public void testAsList_credentialHelperPath() throws IOException {\n Path fakeCredentialHelperPath = temporaryFolder.newFile(\"fake-credHelper\").toPath();\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(fakeCredentialHelperPath.toString());\n\n\n\nThe chunk tests the behavior of the `DefaultCredentialRetrievers` class when a credential helper path is specified.", "metadata": {"chunk_id": "doc_33_chunk_16", "original_index": 16, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_16"}, "type": "Document"} +{"page_content": " List retrievers = credentialRetrievers.asList();\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n\n\nThis chunk of code is part of a test suite for the `DefaultCredentialRetrievers` class, which is responsible for retrieving Docker credentials from various sources. The test checks that the `asList()` method of `DefaultCredentialRetrievers` returns the expected list of `CredentialRetriever` instances in the correct order.", "metadata": {"chunk_id": "doc_33_chunk_17", "original_index": 17, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_17"}, "type": "Document"} +{"page_content": " mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(fakeCredentialHelperPath.toString());\n\n\n\nThis chunk is part of a test suite for the `DefaultCredentialRetrievers` class, which is responsible for retrieving Docker credentials from various sources. 
The chunk specifically tests the retrieval of credentials from Docker configuration files located in different directories, as well as the use of a custom credential helper.", "metadata": {"chunk_id": "doc_33_chunk_18", "original_index": 18, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_18"}, "type": "Document"} +{"page_content": " Files.delete(fakeCredentialHelperPath);\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex)\n .hasMessageThat()\n .isEqualTo(\"Specified credential helper was not found: \" + fakeCredentialHelperPath);\n }\n\n @Test\n public void testDockerConfigRetrievers_undefinedHome() throws FileNotFoundException {\n List retrievers =\n new DefaultCredentialRetrievers(\n mockCredentialRetrieverFactory, new Properties(), new HashMap<>())\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n\n\nThis chunk tests the behavior of the `DefaultCredentialRetrievers` class when the credential helper path is not found, and when the user's home directory is undefined.", "metadata": {"chunk_id": "doc_33_chunk_19", "original_index": 19, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_19"}, "type": "Document"} +{"page_content": " @Test\n public void testDockerConfigRetrievers_noDuplicateRetrievers() throws FileNotFoundException {\n properties.setProperty(\"user.home\", Paths.get(\"/env/home\").toString());\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n\n\nThe provided chunk is a test case that verifies the behavior of the `DefaultCredentialRetrievers` class, specifically the retrieval of Docker configuration-based credential retrievers. 
The test ensures that there are no duplicate credential retrievers in the list returned by the `asList()` method.", "metadata": {"chunk_id": "doc_33_chunk_20", "original_index": 20, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_20"}, "type": "Document"} +{"page_content": " mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n\n\nThis chunk represents the list of credential retrievers that are used to retrieve Docker credentials from various sources, including the Docker config file, the Kubernetes Docker config file, and the legacy Docker config file, as well as well-known credential helpers and the Google Application Default Credentials.", "metadata": {"chunk_id": "doc_33_chunk_21", "original_index": 21, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_21"}, "type": "Document"} +{"page_content": " environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/env/home/.docker\").toString());\n retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockEnvHomeXdgCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n\n\nThis chunk is part of a test suite for the `DefaultCredentialRetrievers` class, which is responsible for retrieving Docker credentials from various sources. 
The test case in this chunk verifies that the credential retrievers are correctly ordered and that there are no duplicate retrievers when the `HOME` and `DOCKER_CONFIG` environment variables are set to specific values.", "metadata": {"chunk_id": "doc_33_chunk_22", "original_index": 22, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_22"}, "type": "Document"} +{"page_content": " @Test\n public void testCredentialHelper_cmdExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.cmd\").toPath();\n Path pathWithoutCmd = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutCmd.getParent().resolve(\"foo.cmd\"));\n\n\n\nThis chunk tests the behavior of the `DefaultCredentialRetrievers` class when the specified credential helper has a `.cmd` extension on a Windows system.", "metadata": {"chunk_id": "doc_33_chunk_23", "original_index": 23, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_23"}, "type": "Document"} +{"page_content": " DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString())\n .asList();\n\n\n\nThis chunk demonstrates how the DefaultCredentialRetrievers class handles the case where the specified credential helper is not found, and how it adjusts the retrieval process when the operating system is Windows.", "metadata": {"chunk_id": "doc_33_chunk_24", "original_index": 24, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_24"}, "type": "Document"} +{"page_content": " assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n\n\nThis chunk is part of a test case for the `DefaultCredentialRetrievers` class, which is responsible for retrieving Docker credentials from various sources. 
The test case checks that the list of credential retrievers returned by the `asList()` method includes the expected set of retrievers in the correct order.", "metadata": {"chunk_id": "doc_33_chunk_25", "original_index": 25, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_25"}, "type": "Document"} +{"page_content": " @Test\n public void testCredentialHelper_exeExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.exe\").toPath();\n Path pathWithoutExe = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutExe.getParent().resolve(\"foo.exe\"));\n\n\n\nThis chunk tests the behavior of the `DefaultCredentialRetrievers` class when the specified credential helper has a `.exe` extension on a Windows system.", "metadata": {"chunk_id": "doc_33_chunk_26", "original_index": 26, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_26"}, "type": "Document"} +{"page_content": " DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString())\n .asList();\n\n\n\nThis chunk demonstrates how the DefaultCredentialRetrievers class handles the case where the specified credential helper is not found, and how it adjusts the credential retrieval process when the operating system is Windows.", "metadata": {"chunk_id": "doc_33_chunk_27", "original_index": 27, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_27"}, "type": "Document"} +{"page_content": " assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n}\n\n\nThis chunk is part of a test case for the `DefaultCredentialRetrievers` class, which is responsible for retrieving Docker credentials from various sources. 
The test case checks that the list of credential retrievers returned by the `asList()` method includes the expected set of retrievers in the correct order.", "metadata": {"chunk_id": "doc_33_chunk_28", "original_index": 28, "pid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_28"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2019 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n\n\nThe provided chunk is the copyright notice at the beginning of the Java source code file for the \"ReproducibleImageTest\" class, which is part of the Jib Core library.", "metadata": {"chunk_id": "doc_34_chunk_0", "original_index": 0, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_0"}, "type": "Document"} +{"page_content": "package com.google.cloud.tools.jib.api;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.api.buildplan.AbsoluteUnixPath;\nimport com.google.cloud.tools.jib.api.buildplan.FileEntriesLayer;\nimport com.google.cloud.tools.jib.api.buildplan.FilePermissions;\nimport com.google.common.collect.ArrayListMultimap;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.Multimap;\nimport com.google.common.io.CharStreams;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\n\n\nThis chunk contains the import statements and the beginning of the ReproducibleImageTest class, which is a test class that verifies the properties of a container image created using the Jib API.", "metadata": {"chunk_id": "doc_34_chunk_1", "original_index": 1, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_1"}, "type": "Document"} +{"page_content": "import java.nio.file.Path;\nimport java.time.Instant;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport java.util.function.BiConsumer;\nimport java.util.zip.GZIPInputStream;\nimport org.apache.commons.compress.archivers.tar.TarArchiveEntry;\nimport org.apache.commons.compress.archivers.tar.TarArchiveInputStream;\nimport org.junit.BeforeClass;\nimport org.junit.ClassRule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\n\n\nThis chunk contains the import statements and class-level declarations for the ReproducibleImageTest class, which is a test suite that verifies the properties of a Docker image created using the Jib API.", "metadata": {"chunk_id": "doc_34_chunk_2", "original_index": 2, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_2"}, "type": "Document"} +{"page_content": "/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n @ClassRule public static final 
TemporaryFolder imageLocation = new TemporaryFolder();\n\n private static File imageTar;\n\n @BeforeClass\n public static void createImage()\n throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n IOException, RegistryException, ExecutionException {\n\n Path root = imageLocation.getRoot().toPath();\n Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n Path fileB = Files.createFile(root.resolve(\"fileB.txt\"));\n Path fileC = Files.createFile(root.resolve(\"fileC.txt\"));\n Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n Path subsubdir = Files.createDirectory(subdir.resolve(\"subdir\"));\n Files.createFile(subdir.resolve(\"fileD.txt\"));\n Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n\n\nThis chunk is part of a test suite for the Jib library, which is a Java container image builder. The test suite verifies that the created Docker image has certain properties, such as explicit directory structures, default timestamps, permissions, and file orderings.", "metadata": {"chunk_id": "doc_34_chunk_3", "original_index": 3, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_3"}, "type": "Document"} +{"page_content": " imageTar = new File(imageLocation.getRoot(), \"image.tar\");\n Containerizer containerizer =\n Containerizer.to(TarImage.at(imageTar.toPath()).named(\"jib-core/reproducible\"));\n\n Jib.fromScratch()\n .setEntrypoint(\"echo\", \"Hello World\")\n .addLayer(ImmutableList.of(fileA), AbsoluteUnixPath.get(\"/app\"))\n // layer with out-of-order files\n .addLayer(ImmutableList.of(fileC, fileB), \"/app\")\n .addFileEntriesLayer(\n FileEntriesLayer.builder()\n .addEntryRecursive(subdir, AbsoluteUnixPath.get(\"/app\"))\n .build())\n .containerize(containerizer);\n }\n\n\n\nThis chunk is part of the `createImage()` method, which is responsible for creating a Docker image using the Jib API. 
The method sets up the necessary configuration, including the image reference, layers, and file entries, and then containerizes the image.", "metadata": {"chunk_id": "doc_34_chunk_4", "original_index": 4, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_4"}, "type": "Document"} +{"page_content": " @Test\n public void testTarballStructure() throws IOException {\n // known content should produce known results\n List actual = new ArrayList<>();\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n actual.add(imageEntry.getName());\n }\n }\n\n assertThat(actual)\n .containsExactly(\n \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n \"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n \"config.json\",\n \"manifest.json\")\n .inOrder();\n }\n\n\n\nThis chunk tests the structure of the generated Docker image tarball, ensuring that the expected files and directories are present in the correct order.", "metadata": {"chunk_id": "doc_34_chunk_5", "original_index": 5, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_5"}, "type": "Document"} +{"page_content": " @Test\n public void testManifest() throws IOException {\n String expectedManifest =\n \"[{\\\"Config\\\":\\\"config.json\\\",\\\"RepoTags\\\":[\\\"jib-core/reproducible:latest\\\"],\"\n + \"\\\"Layers\\\":[\\\"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\\\",\\\"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\\\",\\\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\\\"]}]\";\n String generatedManifest = extractFromTarFileAsString(imageTar, \"manifest.json\");\n assertThat(generatedManifest).isEqualTo(expectedManifest);\n }\n\n\n\nThe chunk is a unit test that verifies the structure and contents of the Docker image manifest generated by the Jib library. 
It checks that the generated manifest matches an expected format and includes the expected layer references.", "metadata": {"chunk_id": "doc_34_chunk_6", "original_index": 6, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_6"}, "type": "Document"} +{"page_content": " @Test\n public void testConfiguration() throws IOException {\n String expectedConfig =\n \"{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\"\n + \"\\\"config\\\":{\\\"Env\\\":[],\\\"Entrypoint\\\":[\\\"echo\\\",\\\"Hello World\\\"],\\\"ExposedPorts\\\":{},\\\"Labels\\\":{},\\\"Volumes\\\":{}},\"\n + \"\\\"history\\\":[{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"}],\"\n\n\nThe chunk is a test case that verifies the configuration of the generated Docker image, including the creation timestamp, architecture, operating system, environment variables, entrypoint, exposed ports, labels, and volumes.", "metadata": {"chunk_id": "doc_34_chunk_7", "original_index": 7, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_7"}, "type": "Document"} +{"page_content": " + \"\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:18e4f44e6d1835bd968339b166057bd17ab7d4cbb56dc7262a5cafea7cf8d405\\\",\\\"sha256:13369c34f073f2b9c1fa6431e23d925f1a8eac65b1726c8cc8fcc2596c69b414\\\",\\\"sha256:4f92c507112d7880ca0f504ef8272b7fdee107263270125036a260a741565923\\\"]}}\";\n String generatedConfig = extractFromTarFileAsString(imageTar, \"config.json\");\n assertThat(generatedConfig).isEqualTo(expectedConfig);\n }\n\n @Test\n public void testImageLayout() throws IOException {\n Set paths = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n paths.add(layerEntry.getName());\n }\n });\n assertThat(paths)\n .containsExactly(\n \"app/fileA.txt\",\n \"app/fileB.txt\",\n \"app/fileC.txt\",\n \"app/fileD.txt\",\n \"app/subdir/fileE.txt\");\n }\n\n\n\nThis chunk of code is part of a test suite that verifies the structure and properties of a Docker image created using the Jib library. 
The tests check the contents of the image's configuration file, the ordering and permissions of the files in the image layers, and the overall structure of the image.", "metadata": {"chunk_id": "doc_34_chunk_8", "original_index": 8, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_8"}, "type": "Document"} +{"page_content": " @Test\n public void testAllFileAndDirectories() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) ->\n assertThat(layerEntry.isFile() || layerEntry.isDirectory()).isTrue());\n }\n\n @Test\n public void testTimestampsEpochPlus1s() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) -> {\n Instant modificationTime = layerEntry.getLastModifiedDate().toInstant();\n assertThat(modificationTime).isEqualTo(Instant.ofEpochSecond(1));\n });\n }\n\n\n\nThis chunk tests that the created image has all files and directories, and that the timestamps of the files are set to the epoch plus one second.", "metadata": {"chunk_id": "doc_34_chunk_9", "original_index": 9, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_9"}, "type": "Document"} +{"page_content": " @Test\n public void testPermissions() throws IOException {\n assertThat(FilePermissions.DEFAULT_FILE_PERMISSIONS.getPermissionBits()).isEqualTo(0644);\n assertThat(FilePermissions.DEFAULT_FOLDER_PERMISSIONS.getPermissionBits()).isEqualTo(0755);\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0644);\n } else if (layerEntry.isDirectory()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0755);\n }\n });\n }\n\n\n\nThis chunk tests that the created image has the expected file and directory permissions, specifically that files have permissions 0644 and directories have permissions 0755.", "metadata": {"chunk_id": "doc_34_chunk_10", "original_index": 10, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_10"}, "type": "Document"} +{"page_content": " @Test\n public void testNoImplicitParentDirectories() throws IOException {\n Set directories = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n String entryPath = layerEntry.getName();\n if (layerEntry.isDirectory()) {\n assertThat(entryPath.endsWith(\"/\")).isTrue();\n entryPath = entryPath.substring(0, entryPath.length() - 1);\n }\n\n int lastSlashPosition = entryPath.lastIndexOf('/');\n String parent = entryPath.substring(0, Math.max(0, lastSlashPosition));\n if (!parent.isEmpty()) {\n assertThat(directories.contains(parent)).isTrue();\n }\n if (layerEntry.isDirectory()) {\n directories.add(entryPath);\n }\n });\n }\n\n\n\nThis chunk tests that the image created by the Jib API does not have any implicit parent directories, ensuring that only explicitly defined directories are present in the final image.", "metadata": {"chunk_id": "doc_34_chunk_11", "original_index": 11, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_11"}, "type": "Document"} +{"page_content": " @Test\n public void testFileOrdering() throws IOException {\n Multimap layerPaths = ArrayListMultimap.create();\n layerEntriesDo((layerName, layerEntry) -> layerPaths.put(layerName, layerEntry.getName()));\n for (Collection paths : layerPaths.asMap().values()) {\n List sorted = new ArrayList<>(paths);\n // ReproducibleLayerBuilder sorts by TarArchiveEntry::getName()\n Collections.sort(sorted);\n assertThat(paths).containsExactlyElementsIn(sorted).inOrder();\n }\n }\n\n private void 
layerEntriesDo(BiConsumer layerConsumer)\n throws IOException {\n\n\n\nThis chunk tests the file ordering within the image layers, ensuring that the files are sorted in the expected order. The `layerEntriesDo` method is a helper method that iterates through the layers in the image tar file and performs the necessary checks.", "metadata": {"chunk_id": "doc_34_chunk_12", "original_index": 12, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_12"}, "type": "Document"} +{"page_content": " try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n String imageEntryName = imageEntry.getName();\n // assume all .tar.gz files are layers\n if (imageEntry.isFile() && imageEntryName.endsWith(\".tar.gz\")) {\n @SuppressWarnings(\"resource\") // must not close sub-streams\n TarArchiveInputStream layer = new TarArchiveInputStream(new GZIPInputStream(input));\n TarArchiveEntry layerEntry;\n while ((layerEntry = layer.getNextTarEntry()) != null) {\n layerConsumer.accept(imageEntryName, layerEntry);\n }\n }\n }\n }\n }\n\n\n\nThe provided chunk is part of a test suite that verifies the structure and properties of a Docker image created using the Jib library. The chunk iterates through the layers of the image tarball and performs various assertions on the contents of each layer.", "metadata": {"chunk_id": "doc_34_chunk_13", "original_index": 13, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_13"}, "type": "Document"} +{"page_content": " private static String extractFromTarFileAsString(File tarFile, String filename)\n throws IOException {\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(tarFile.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n if (filename.equals(imageEntry.getName())) {\n return CharStreams.toString(new InputStreamReader(input, StandardCharsets.UTF_8));\n }\n }\n }\n throw new AssertionError(\"file not found: \" + filename);\n }\n}\n\n\nThe provided chunk is a private static method named `extractFromTarFileAsString` that extracts the contents of a file with a given filename from a tar file and returns it as a string. This method is used within the `ReproducibleImageTest` class to verify the contents of the generated image configuration and manifest.", "metadata": {"chunk_id": "doc_34_chunk_14", "original_index": 14, "pid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_14"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n\n\nJava source code for a simple \"Hello World\" program.", "metadata": {"chunk_id": "doc_35_chunk_0", "original_index": 0, "pid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0_0"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n\n\nThe provided chunk is the copyright notice for the Java class `MavenSettingsServerCredentialsTest` in the `com.google.cloud.tools.jib.maven` package.", "metadata": {"chunk_id": "doc_36_chunk_0", "original_index": 0, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_0"}, "type": "Document"} +{"page_content": "package com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. */\npublic class MavenSettingsServerCredentialsTest {\n\n\n\nThis chunk is the beginning of a test suite for the `MavenSettingsServerCredentials` class, which is part of the Jib Maven plugin. 
The test suite verifies the behavior of the `inferAuth` method, which attempts to infer authentication credentials from Maven settings.", "metadata": {"chunk_id": "doc_36_chunk_1", "original_index": 1, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_1"}, "type": "Document"} +{"page_content": " private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n\n\nThis chunk defines the private member variables and file paths used in the `MavenSettingsServerCredentialsTest` class, which is testing the `MavenSettingsServerCredentials` class.", "metadata": {"chunk_id": "doc_36_chunk_2", "original_index": 2, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_2"}, "type": "Document"} +{"page_content": " @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n\n\nThis chunk of code is part of a test suite for the `MavenSettingsServerCredentials` class, which is responsible for handling server credentials in Maven settings files. 
The chunk includes a setup method that initializes the test environment, and a test method that verifies the behavior of the `inferAuth` method when the settings decrypter fails to decrypt the server information.", "metadata": {"chunk_id": "doc_36_chunk_3", "original_index": 3, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_3"}, "type": "Document"} +{"page_content": " @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n\n\nThis chunk tests the successful retrieval of encrypted and unencrypted server credentials from Maven settings files.", "metadata": {"chunk_id": "doc_36_chunk_4", "original_index": 4, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_4"}, "type": "Document"} +{"page_content": " @Test\n public void testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n\n\nThe provided chunk contains test cases that verify the behavior of the `MavenSettingsServerCredentials` class when inferring authentication credentials from Maven settings files. 
The tests cover scenarios where the settings file contains encrypted and unencrypted server credentials, as well as cases where the settings file does not have a master password set.", "metadata": {"chunk_id": "doc_36_chunk_5", "original_index": 5, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_5"}, "type": "Document"} +{"page_content": " @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n\n\nThe provided chunk tests the behavior of the `MavenSettingsServerCredentials` class when inferring authentication credentials for a registry with a host but no port specified.", "metadata": {"chunk_id": "doc_36_chunk_6", "original_index": 6, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_6"}, "type": "Document"} +{"page_content": " @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. Should work as well.\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n\n\nThis chunk tests the behavior of the `MavenSettingsServerCredentials` class when attempting to infer authentication credentials for a registry with a specified port, as well as when the requested server is not found in the Maven settings.", "metadata": {"chunk_id": "doc_36_chunk_7", "original_index": 7, "pid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_7"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.registry;\n\n\n\nThe provided chunk is the copyright notice and package declaration for the BlobPullerIntegrationTest class, which is part of the com.google.cloud.tools.jib.registry package.", "metadata": {"chunk_id": "doc_37_chunk_0", "original_index": 0, "pid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_0"}, "type": "Document"} +{"page_content": "import com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.blob.Blob;\nimport com.google.cloud.tools.jib.event.EventHandlers;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.image.json.V22ManifestTemplate;\nimport com.google.common.io.ByteStreams;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.concurrent.atomic.LongAdder;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\n\nThe chunk contains the import statements and the test methods for the `BlobPullerIntegrationTest` class, which is part of the `com.google.cloud.tools.jib.registry` package.", "metadata": {"chunk_id": "doc_37_chunk_1", "original_index": 1, "pid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_1"}, "type": "Document"} +{"page_content": "/** Integration tests for {@link BlobPuller}. */\npublic class BlobPullerIntegrationTest {\n\n private final FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n\n @Test\n public void testPull() throws IOException, RegistryException {\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n V22ManifestTemplate manifestTemplate =\n registryClient\n .pullManifest(\n ManifestPullerIntegrationTest.KNOWN_MANIFEST_V22_SHA, V22ManifestTemplate.class)\n .getManifest();\n\n DescriptorDigest realDigest = manifestTemplate.getLayers().get(0).getDigest();\n\n\n\nThe provided chunk is part of an integration test suite for the `BlobPuller` class, which is responsible for pulling blobs from a container registry. The test suite includes two test cases: one that successfully pulls a known blob, and another that attempts to pull a non-existent blob.", "metadata": {"chunk_id": "doc_37_chunk_2", "original_index": 2, "pid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_2"}, "type": "Document"} +{"page_content": " // Pulls a layer BLOB of the distroless/base image.\n LongAdder totalByteCount = new LongAdder();\n LongAdder expectedSize = new LongAdder();\n Blob pulledBlob =\n registryClient.pullBlob(\n realDigest,\n size -> {\n Assert.assertEquals(0, expectedSize.sum());\n expectedSize.add(size);\n },\n totalByteCount::add);\n Assert.assertEquals(realDigest, pulledBlob.writeTo(ByteStreams.nullOutputStream()).getDigest());\n Assert.assertTrue(expectedSize.sum() > 0);\n Assert.assertEquals(expectedSize.sum(), totalByteCount.sum());\n }\n\n\n\nThis chunk demonstrates the process of pulling a layer blob from a container registry using the `BlobPuller` class. 
It includes code that pulls the blob, verifies the digest, and checks the expected and total byte counts.", "metadata": {"chunk_id": "doc_37_chunk_3", "original_index": 3, "pid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_3"}, "type": "Document"} +{"page_content": " @Test\n public void testPull_unknownBlob() throws IOException, DigestException {\n DescriptorDigest nonexistentDigest =\n DescriptorDigest.fromHash(\n \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\");\n\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n\n try {\n registryClient\n .pullBlob(nonexistentDigest, ignored -> {}, ignored -> {})\n .writeTo(ByteStreams.nullOutputStream());\n Assert.fail(\"Trying to pull nonexistent blob should have errored\");\n\n } catch (IOException ex) {\n if (!(ex.getCause() instanceof RegistryErrorException)) {\n throw ex;\n }\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"pull BLOB for gcr.io/distroless/base with digest \" + nonexistentDigest));\n }\n }\n}\n\n\nThis chunk of code is a test case for the `BlobPuller` class, which is responsible for pulling blobs from a container registry. The test case specifically checks the behavior of the `pullBlob` method when attempting to pull a blob with an unknown digest.", "metadata": {"chunk_id": "doc_37_chunk_4", "original_index": 4, "pid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_4"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\n\n\nThe provided chunk is the copyright notice and package declaration for the `JibBuildRunnerTest` class, which is part of the `com.google.cloud.tools.jib.plugins.common` package.", "metadata": {"chunk_id": "doc_38_chunk_0", "original_index": 0, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_0"}, "type": "Document"} +{"page_content": "import com.google.api.client.http.HttpResponseException;\nimport com.google.api.client.http.HttpStatusCodes;\nimport com.google.cloud.tools.jib.api.CacheDirectoryCreationException;\nimport com.google.cloud.tools.jib.api.Containerizer;\nimport com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.ImageReference;\nimport com.google.cloud.tools.jib.api.InsecureRegistryException;\nimport com.google.cloud.tools.jib.api.JibContainer;\nimport com.google.cloud.tools.jib.api.JibContainerBuilder;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.api.RegistryUnauthorizedException;\nimport com.google.cloud.tools.jib.registry.RegistryCredentialsNotSentException;\nimport com.google.common.collect.ImmutableSet;\nimport java.io.IOException;\nimport java.net.UnknownHostException;\n\n\nThe provided chunk contains import statements for various classes and interfaces used in the JibBuildRunnerTest class, which is a test class for the JibBuildRunner class. The JibBuildRunner class is part of the Jib Plugins Common library, which is used in the Jib plugins for building container images.", "metadata": {"chunk_id": "doc_38_chunk_1", "original_index": 1, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_1"}, "type": "Document"} +{"page_content": "import java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport org.apache.http.conn.HttpHostConnectException;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.Mockito;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n\n\nThe provided chunk contains the import statements and test setup code for the `JibBuildRunnerTest` class, which is testing the `JibBuildRunner` class in the `com.google.cloud.tools.jib.plugins.common` package.", "metadata": {"chunk_id": "doc_38_chunk_2", "original_index": 2, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_2"}, "type": "Document"} +{"page_content": "/** Tests for {@link JibBuildRunner}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class JibBuildRunnerTest {\n\n private static final HelpfulSuggestions TEST_HELPFUL_SUGGESTIONS =\n new HelpfulSuggestions(\n \"messagePrefix\", \"clearCacheCommand\", \"toConfig\", \"toFlag\", \"buildFile\");\n\n @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Mock private JibContainerBuilder mockJibContainerBuilder;\n @Mock private JibContainer mockJibContainer;\n @Mock private Containerizer mockContainerizer;\n @Mock private RegistryUnauthorizedException mockRegistryUnauthorizedException;\n @Mock private RegistryCredentialsNotSentException mockRegistryCredentialsNotSentException;\n @Mock private HttpResponseException mockHttpResponseException;\n\n\n\nThis chunk contains the test class for the `JibBuildRunner` class, which is responsible for building and running Docker containers using the Jib library. The test class sets up mocks for various dependencies and tests the behavior of the `JibBuildRunner` class in different scenarios, such as when encountering various exceptions during the build process.", "metadata": {"chunk_id": "doc_38_chunk_3", "original_index": 3, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_3"}, "type": "Document"} +{"page_content": " private JibBuildRunner testJibBuildRunner;\n\n @Before\n public void setUpMocks() {\n testJibBuildRunner =\n new JibBuildRunner(\n mockJibContainerBuilder,\n mockContainerizer,\n ignored -> {},\n TEST_HELPFUL_SUGGESTIONS,\n \"ignored\",\n \"ignored\");\n }\n\n @Test\n public void testBuildImage_pass()\n throws BuildStepsExecutionException, IOException, CacheDirectoryCreationException {\n JibContainer buildResult = testJibBuildRunner.runBuild();\n Assert.assertNull(buildResult);\n }\n\n\n\nThe provided chunk is part of a test suite for the `JibBuildRunner` class, which is responsible for running the build process for a container image using the Jib library. 
The chunk sets up the test environment, including mocking the necessary dependencies, and includes a test case that verifies the successful execution of the `runBuild()` method.", "metadata": {"chunk_id": "doc_38_chunk_4", "original_index": 4, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_4"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_httpHostConnectException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n HttpHostConnectException mockHttpHostConnectException =\n Mockito.mock(HttpHostConnectException.class);\n Mockito.doThrow(mockHttpHostConnectException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forHttpHostConnect(), ex.getMessage());\n }\n }\n\n\n\nThe chunk tests the behavior of the `JibBuildRunner` class when an `HttpHostConnectException` is thrown during the build process, and verifies that the appropriate error message is returned.", "metadata": {"chunk_id": "doc_38_chunk_5", "original_index": 5, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_5"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_unknownHostException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n UnknownHostException mockUnknownHostException = Mockito.mock(UnknownHostException.class);\n Mockito.doThrow(mockUnknownHostException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forUnknownHost(), ex.getMessage());\n }\n }\n\n\n\nThe chunk is a test case that verifies the behavior of the `JibBuildRunner` class when an `UnknownHostException` is thrown during the build process. 
The test mocks the `JibContainerBuilder` to throw the exception and then checks that the `BuildStepsExecutionException` thrown by the `JibBuildRunner` contains the expected error message.", "metadata": {"chunk_id": "doc_38_chunk_6", "original_index": 6, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_6"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_insecureRegistryException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n InsecureRegistryException mockInsecureRegistryException =\n Mockito.mock(InsecureRegistryException.class);\n Mockito.doThrow(mockInsecureRegistryException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forInsecureRegistry(), ex.getMessage());\n }\n }\n\n\n\nThe chunk tests the behavior of the `JibBuildRunner` class when an `InsecureRegistryException` is thrown during the build process.", "metadata": {"chunk_id": "doc_38_chunk_7", "original_index": 7, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_7"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_registryUnauthorizedException_statusCodeForbidden()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode())\n .thenReturn(HttpStatusCodes.STATUS_CODE_FORBIDDEN);\n\n\n\nThe chunk is part of a test suite for the `JibBuildRunner` class, which is responsible for building and containerizing images using the Jib library. The specific test case is checking the behavior of the `JibBuildRunner` when a `RegistryUnauthorizedException` is thrown with a status code of 403 (Forbidden).", "metadata": {"chunk_id": "doc_38_chunk_8", "original_index": 8, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_8"}, "type": "Document"} +{"page_content": " Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forHttpStatusCodeForbidden(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n\n\nThe chunk is part of a test case that verifies the behavior of the `JibBuildRunner` class when a `RegistryUnauthorizedException` is thrown during the build process. 
The test case checks that the appropriate error message is displayed when the exception is caused by a 403 Forbidden HTTP status code.", "metadata": {"chunk_id": "doc_38_chunk_9", "original_index": 9, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_9"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_registryUnauthorizedException_noCredentials()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode()).thenReturn(-1); // Unknown\n\n Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forNoCredentialsDefined(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n\n\nThis chunk tests the behavior of the `JibBuildRunner` class when a `RegistryUnauthorizedException` is thrown due to a lack of credentials for the target registry.", "metadata": {"chunk_id": "doc_38_chunk_10", "original_index": 10, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_10"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_registryCredentialsNotSentException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.doThrow(mockRegistryCredentialsNotSentException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forCredentialsNotSent(), ex.getMessage());\n }\n }\n\n\n\nThe chunk is testing the behavior of the `JibBuildRunner` class when a `RegistryCredentialsNotSentException` is thrown during the build process. 
It verifies that the appropriate error message is provided to the user in this scenario.", "metadata": {"chunk_id": "doc_38_chunk_11", "original_index": 11, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_11"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_other()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.doThrow(new RegistryException(\"messagePrefix\"))\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.none(), ex.getMessage());\n }\n }\n\n\n\nThis chunk tests the behavior of the `JibBuildRunner` class when a `RegistryException` is thrown during the build process, and verifies that the appropriate error message is returned.", "metadata": {"chunk_id": "doc_38_chunk_12", "original_index": 12, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_12"}, "type": "Document"} +{"page_content": " @Test\n public void testBuildImage_writesImageJson() throws Exception {\n final ImageReference targetImageReference = ImageReference.parse(\"gcr.io/distroless/java:11\");\n final String imageId =\n \"sha256:61bb3ec31a47cb730eb58a38bbfa813761a51dca69d10e39c24c3d00a7b2c7a9\";\n final String digest = \"sha256:3f1be7e19129edb202c071a659a4db35280ab2bb1a16f223bfd5d1948657b6fc\";\n final Set tags = ImmutableSet.of(\"latest\", \"0.1.41-69d10e-20200116T101403\");\n\n final Path outputPath = temporaryFolder.newFile(\"jib-image.json\").toPath();\n\n\n\nThe chunk is a test case that verifies the functionality of the `JibBuildRunner` class, specifically the `writeImageJson` method, which writes the metadata of a built container image to a JSON file.", "metadata": {"chunk_id": "doc_38_chunk_13", "original_index": 13, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_13"}, "type": "Document"} +{"page_content": " Mockito.when(mockJibContainer.getTargetImage()).thenReturn(targetImageReference);\n Mockito.when(mockJibContainer.getImageId()).thenReturn(DescriptorDigest.fromDigest(imageId));\n Mockito.when(mockJibContainer.getDigest()).thenReturn(DescriptorDigest.fromDigest(digest));\n Mockito.when(mockJibContainer.getTags()).thenReturn(tags);\n Mockito.when(mockJibContainerBuilder.containerize(mockContainerizer))\n .thenReturn(mockJibContainer);\n Mockito.when(mockJibContainer.isImagePushed()).thenReturn(true);\n testJibBuildRunner.writeImageJson(outputPath).runBuild();\n\n\n\nThe provided chunk is part of a test case that verifies the behavior of the `JibBuildRunner` class. 
Specifically, this chunk sets up the expected behavior of the `mockJibContainer` and `mockJibContainerBuilder` objects, and then calls the `writeImageJson` and `runBuild` methods of the `testJibBuildRunner` instance.", "metadata": {"chunk_id": "doc_38_chunk_14", "original_index": 14, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_14"}, "type": "Document"} +{"page_content": " final String outputJson = new String(Files.readAllBytes(outputPath), StandardCharsets.UTF_8);\n final ImageMetadataOutput metadataOutput = ImageMetadataOutput.fromJson(outputJson);\n Assert.assertEquals(targetImageReference.toString(), metadataOutput.getImage());\n Assert.assertEquals(imageId, metadataOutput.getImageId());\n Assert.assertEquals(digest, metadataOutput.getImageDigest());\n Assert.assertEquals(tags, ImmutableSet.copyOf(metadataOutput.getTags()));\n Assert.assertTrue(metadataOutput.isImagePushed());\n }\n}\n\n\nThe provided chunk is part of a test case that verifies the functionality of writing image metadata to a JSON file in the `JibBuildRunner` class.", "metadata": {"chunk_id": "doc_38_chunk_15", "original_index": 15, "pid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_15"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n\n\nThe provided chunk is the copyright notice for the Java class `BuildDockerMojoIntegrationTest` in the `com.google.cloud.tools.jib.maven` package.", "metadata": {"chunk_id": "doc_39_chunk_0", "original_index": 0, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_0"}, "type": "Document"} +{"page_content": "package com.google.cloud.tools.jib.maven;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.Command;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.Arrays;\nimport org.apache.maven.it.VerificationException;\nimport org.apache.maven.it.Verifier;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Assume;\nimport org.junit.ClassRule;\nimport org.junit.Test;\n\n/** Integration tests for {@link BuildDockerMojo}. 
*/\npublic class BuildDockerMojoIntegrationTest {\n\n @ClassRule public static final TestProject simpleTestProject = new TestProject(\"simple\");\n\n @ClassRule public static final TestProject emptyTestProject = new TestProject(\"empty\");\n\n @ClassRule\n public static final TestProject defaultTargetTestProject = new TestProject(\"default-target\");\n\n\n\nThis chunk contains the integration tests for the `BuildDockerMojo` class in the `com.google.cloud.tools.jib.maven` package.", "metadata": {"chunk_id": "doc_39_chunk_1", "original_index": 1, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_1"}, "type": "Document"} +{"page_content": " private static void buildToDockerDaemon(TestProject project, String imageReference, String pomXml)\n throws VerificationException, DigestException, IOException {\n Verifier verifier = new Verifier(project.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", imageReference);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=\" + pomXml);\n verifier.executeGoal(\"package\");\n\n\n\nThe provided chunk is a method that builds a Docker image using the Jib Maven plugin. It is part of the integration tests for the `BuildDockerMojo` class, which is responsible for building Docker images using the Jib Maven plugin.", "metadata": {"chunk_id": "doc_39_chunk_2", "original_index": 2, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_2"}, "type": "Document"} +{"page_content": " verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyErrorFreeLog();\n\n BuildImageMojoIntegrationTest.readDigestFile(\n project.getProjectRoot().resolve(\"target/jib-image.digest\"));\n }\n\n /**\n * Builds and runs jib:buildDocker on a project at {@code projectRoot} pushing to {@code\n * imageReference}.\n */\n private static String buildToDockerDaemonAndRun(TestProject project, String imageReference)\n throws VerificationException, IOException, InterruptedException, DigestException {\n buildToDockerDaemon(project, imageReference, \"pom.xml\");\n\n\n\nThe chunk is part of an integration test for the `BuildDockerMojo` class, which is responsible for building Docker images using the Jib Maven plugin. 
The test methods in this class verify the behavior of the `BuildDockerMojo` by executing the `jib:dockerBuild` goal and checking the resulting Docker images.", "metadata": {"chunk_id": "doc_39_chunk_3", "original_index": 3, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_3"}, "type": "Document"} +{"page_content": " String dockerInspectVolumes =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Volumes}}'\", imageReference).run();\n String dockerInspectExposedPorts =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.ExposedPorts}}'\", imageReference)\n .run();\n String dockerInspectLabels =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Labels}}'\", imageReference).run();\n String history = new Command(\"docker\", \"history\", imageReference).run();\n\n MatcherAssert.assertThat(\n dockerInspectVolumes, CoreMatchers.containsString(\"\\\"/var/log\\\":{},\\\"/var/log2\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectExposedPorts,\n CoreMatchers.containsString(\n \"\\\"1000/tcp\\\":{},\\\"2000/udp\\\":{},\\\"2001/udp\\\":{},\\\"2002/udp\\\":{},\\\"2003/udp\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectLabels,\n CoreMatchers.containsString(\"\\\"key1\\\":\\\"value1\\\",\\\"key2\\\":\\\"value2\\\"\"));\n\n\n\nThe chunk is part of the `buildToDockerDaemonAndRun` method, which is responsible for building and running the Docker image using the Jib Maven plugin.", "metadata": {"chunk_id": "doc_39_chunk_4", "original_index": 4, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_4"}, "type": "Document"} +{"page_content": " return new Command(\"docker\", \"run\", \"--rm\", imageReference).run();\n }\n\n @Test\n public void testExecute_simple()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n Assert.assertEquals(\n \"Hello, world. An argument.\\n1970-01-01T00:00:01Z\\nrw-r--r--\\nrw-r--r--\\nfoo\\ncat\\n\"\n + \"1970-01-01T00:00:01Z\\n1970-01-01T00:00:01Z\\n\",\n buildToDockerDaemonAndRun(simpleTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n\n\nThis chunk is part of an integration test suite for the `BuildDockerMojo` class, which is responsible for building Docker images using the Jib Maven plugin. 
The test case `testExecute_simple()` verifies that the plugin can successfully build and run a simple Docker image.", "metadata": {"chunk_id": "doc_39_chunk_5", "original_index": 5, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_5"}, "type": "Document"} +{"page_content": " @Test\n public void testExecute_simple_extraDirectoriesFiltering()\n throws DigestException, IOException, InterruptedException, VerificationException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-extra-dirs-filtering.xml\");\n String output =\n new Command(\"docker\", \"run\", \"--rm\", \"--entrypoint=ls\", targetImage, \"-1R\", \"/extras\")\n .run();\n\n // /extras/cat.txt\n // /extras/foo\n // /extras/sub/\n // /extras/sub/a.json\n assertThat(output).isEqualTo(\"/extras:\\ncat.txt\\nfoo\\nsub\\n\\n/extras/sub:\\na.json\\n\");\n }\n\n @Test\n public void testExecute_dockerClient()\n throws VerificationException, IOException, InterruptedException {\n Assume.assumeFalse(System.getProperty(\"os.name\").startsWith(\"Windows\"));\n new Command(\n \"chmod\", \"+x\", simpleTestProject.getProjectRoot().resolve(\"mock-docker.sh\").toString())\n .run();\n\n\n\nThis chunk tests the behavior of the BuildDockerMojo class, specifically the handling of extra directories and the use of a mock Docker client.", "metadata": {"chunk_id": "doc_39_chunk_6", "original_index": 6, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_6"}, "type": "Document"} +{"page_content": " String targetImage = \"simpleimage:maven\" + System.nanoTime();\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-dockerclient.xml\");\n verifier.addCliOption(\"--debug\");\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyTextInLog(\"Docker load called. value1 value2\");\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_empty()\n throws InterruptedException, IOException, VerificationException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n\n\n\nThis chunk is part of an integration test for the `BuildDockerMojo` class, which is responsible for building Docker images using the Jib Maven plugin. The chunk specifically tests the execution of the `jib:dockerBuild` goal when the Docker client is configured.", "metadata": {"chunk_id": "doc_39_chunk_7", "original_index": 7, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_7"}, "type": "Document"} +{"page_content": " Assert.assertEquals(\"\", buildToDockerDaemonAndRun(emptyTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_defaultTarget()\n throws VerificationException, IOException, InterruptedException, DigestException {\n Assert.assertEquals(\n \"Hello, world. 
An argument.\\n\",\n buildToDockerDaemonAndRun(\n defaultTargetTestProject, \"default-target-name:default-target-version\"));\n }\n\n\n\nThis chunk tests the execution of the BuildDockerMojo, specifically for an empty project and a project with a default target.", "metadata": {"chunk_id": "doc_39_chunk_8", "original_index": 8, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_8"}, "type": "Document"} +{"page_content": " @Test\n public void testExecute_jibSkip() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibSkip(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_jibContainerizeSkips() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibContainerizeSkips(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_userNumeric()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom.xml\");\n Assert.assertEquals(\n \"12345:54321\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n\n\nThis chunk contains tests for the BuildDockerMojo class, specifically testing the behavior when the jib plugin is skipped, when jib containerization is skipped, and when the user is specified as a numeric value.", "metadata": {"chunk_id": "doc_39_chunk_9", "original_index": 9, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_9"}, "type": "Document"} +{"page_content": " @Test\n public void testExecute_userNames()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"brokenuserimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom-broken-user.xml\");\n Assert.assertEquals(\n \"myuser:mygroup\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_noToImageAndInvalidProjectName()\n throws DigestException, VerificationException, IOException, InterruptedException {\n buildToDockerDaemon(simpleTestProject, \"image reference ignored\", \"pom-no-to-image.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", \"my-artifact-id:1\").run());\n }\n\n\n\nThis chunk tests the behavior of the BuildDockerMojo class in the Jib Maven plugin, specifically the handling of user names and the case where the target image is not specified in the pom.xml file.", "metadata": {"chunk_id": "doc_39_chunk_10", "original_index": 10, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_10"}, "type": "Document"} +{"page_content": " @Test\n public void testExecute_jarContainerization()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"jarcontainerizationimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-jar-containerization.xml\");\n Assert.assertEquals(\n \"Hello, world. 
\\nImplementation-Title: hello-world\\nImplementation-Version: 1\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testExecute_jarContainerizationOnMissingJar() throws IOException {\n try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"_TARGET_IMAGE\", \"erroronmissingjar\");\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-jar-containerization.xml\");\n verifier.executeGoals(Arrays.asList(\"clean\", \"jib:dockerBuild\"));\n Assert.fail();\n\n\n\nThis chunk tests the behavior of the BuildDockerMojo class when containerizing a JAR file, including the case where the JAR file is missing.", "metadata": {"chunk_id": "doc_39_chunk_11", "original_index": 11, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_11"}, "type": "Document"} +{"page_content": " } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"Obtaining project build output files failed; make sure you have packaged your \"\n + \"project before trying to build the image. (Did you accidentally run \\\"mvn \"\n + \"clean jib:build\\\" instead of \\\"mvn clean package jib:build\\\"?)\"));\n }\n }\n\n @Test\n public void testExecute_jibRequireVersion_ok() throws VerificationException, IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n\n\nThis chunk is part of the integration tests for the `BuildDockerMojo` class, which is responsible for building Docker images using the Jib Maven plugin. The chunk specifically tests the behavior when the required Jib version is not met, and when the required version is met.", "metadata": {"chunk_id": "doc_39_chunk_12", "original_index": 12, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_12"}, "type": "Document"} +{"page_content": " Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n // this plugin should match 1.0\n verifier.setSystemProperty(\"jib.requiredVersion\", \"1.0\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_jibRequireVersion_fail() throws IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n\n\nThis chunk of code is part of the integration tests for the `BuildDockerMojo` class in the Jib Maven plugin. 
It tests the behavior of the plugin when the required version of the Jib plugin is specified, both when the version matches and when it does not.", "metadata": {"chunk_id": "doc_39_chunk_13", "original_index": 13, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_13"}, "type": "Document"} +{"page_content": " try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.requiredVersion\", \"[,1.0]\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n Assert.fail();\n } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(), CoreMatchers.containsString(\"but is required to be [,1.0]\"));\n }\n }\n\n\n\nThis chunk is part of a test case that verifies the behavior of the `BuildDockerMojo` class when the required version of the Jib plugin is not met.", "metadata": {"chunk_id": "doc_39_chunk_14", "original_index": 14, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_14"}, "type": "Document"} +{"page_content": " @Test\n public void testCredHelperConfigurationSimple()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-1.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testCredHelperConfigurationComplex()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-2.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n}\n\n\nThis chunk tests the configuration of credential helpers for the BuildDockerMojo class, which is responsible for building Docker images using the Jib Maven plugin.", "metadata": {"chunk_id": "doc_39_chunk_15", "original_index": 15, "pid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_15"}, "type": "Document"} +{"page_content": "/*\n * Copyright 2017 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n\n\nCopyright notice for the Java class RegistryAuthenticationFailedException.", "metadata": {"chunk_id": "doc_40_chunk_0", "original_index": 0, "pid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09_0"}, "type": "Document"} +{"page_content": "package com.google.cloud.tools.jib.api;\n\nimport java.text.MessageFormat;\n\n/** Thrown because registry authentication failed. 
*/\npublic class RegistryAuthenticationFailedException extends RegistryException {\n\n private static final String REASON = \"Failed to authenticate with registry {0}/{1} because: {2}\";\n private final String serverUrl;\n private final String imageName;\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param cause the underlying cause that triggered this exception\n */\n public RegistryAuthenticationFailedException(\n String serverUrl, String imageName, Throwable cause) {\n super(MessageFormat.format(REASON, serverUrl, imageName, cause.getMessage()), cause);\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n\n\nThis chunk is part of the Java class `RegistryAuthenticationFailedException` which extends the `RegistryException` class. The class is used to represent an exception that occurs when registry authentication fails.", "metadata": {"chunk_id": "doc_40_chunk_1", "original_index": 1, "pid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09_1"}, "type": "Document"} +{"page_content": " /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param reason the underlying reason that triggered this exception\n */\n public RegistryAuthenticationFailedException(String serverUrl, String imageName, String reason) {\n super(MessageFormat.format(REASON, serverUrl, imageName, reason));\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n /**\n * The server being authenticated.\n *\n * @return the server being authenticated\n */\n public String getServerUrl() {\n return serverUrl;\n }\n\n /**\n * The image being authenticated.\n *\n * @return the image being authenticated\n */\n public String getImageName() {\n return imageName;\n }\n}\n\n\nThis chunk of code defines a custom exception class called `RegistryAuthenticationFailedException` that is thrown when registry authentication fails. The class provides constructors to create the exception with a human-readable message, and getter methods to retrieve the server URL and image name associated with the authentication failure.", "metadata": {"chunk_id": "doc_40_chunk_2", "original_index": 2, "pid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09_2"}, "type": "Document"} +{"page_content": "package com.password4j;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\npublic class PepperGeneratorTest\n{\n\n @Test\n public void testSaltLength()\n {\n // GIVEN\n int length = 23;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n\n @Test\n public void testSaltNoLength()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.generate();\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(24, pepper.length());\n }\n\n\n\nThe provided chunk is a set of unit tests for the `PepperGenerator` class, which is likely responsible for generating a \"pepper\" value used in password hashing. 
The tests cover various scenarios, including generating a pepper with a specified length, generating a pepper without specifying a length, and handling negative or zero length inputs.", "metadata": {"chunk_id": "doc_41_chunk_0", "original_index": 0, "pid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b_0"}, "type": "Document"} +{"page_content": " @Test(expected = BadParametersException.class)\n public void testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n\n\nThe provided chunk contains unit tests for the PepperGenerator class, which include testing the generation of pepper strings with different length parameters, as well as a test for the \"AlicePepper\" constant.", "metadata": {"chunk_id": "doc_41_chunk_1", "original_index": 1, "pid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b_1"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\n\n\nThe provided chunk is the copyright and license information for the Password4j library, which is located at the beginning of the document.", "metadata": {"chunk_id": "doc_42_chunk_0", "original_index": 0, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_0"}, "type": "Document"} +{"page_content": "package com.password4j;\n\n/**\n * Class in the hierarchy to avoid code duplication.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic abstract class AbstractHashingFunction implements HashingFunction\n{\n\n /**\n * Compares two {@link CharSequence}s as byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first CharSequence\n * @param b the second CharSequence\n * @return true if both {@link CharSequence}s are the same, false if not\n */\n protected static boolean slowEquals(CharSequence a, CharSequence b)\n {\n return slowEquals(Utils.fromCharSequenceToBytes(a), Utils.fromCharSequenceToBytes(b));\n }\n\n\n\nThe provided chunk is the implementation of the `AbstractHashingFunction` class, which is part of the `com.password4j` package. 
This class serves as a base class for various hashing functions, providing common functionality to avoid code duplication.", "metadata": {"chunk_id": "doc_42_chunk_1", "original_index": 1, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_1"}, "type": "Document"} +{"page_content": " /**\n * Compares two byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first byte array\n * @param b the second byte array\n * @return true if both byte arrays are the same, false if not\n */\n protected static boolean slowEquals(byte[] a, byte[] b)\n {\n int diff = a.length ^ b.length;\n for (int i = 0; i < a.length && i < b.length; i++)\n {\n diff |= a[i] ^ b[i];\n }\n return diff == 0;\n }\n\n\n\nThe provided chunk is a method implementation within the `AbstractHashingFunction` class, which is part of the `com.password4j` package. The method, `slowEquals`, compares two byte arrays in a length-constant time to prevent timing attacks on password hashes.", "metadata": {"chunk_id": "doc_42_chunk_2", "original_index": 2, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_2"}, "type": "Document"} +{"page_content": " @Override\n public Hash hash(CharSequence plainTextPassword, String salt, CharSequence pepper)\n {\n CharSequence peppered = Utils.append(pepper, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n @Override\n public Hash hash(byte[] plainTextPassword, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n byte[] peppered = Utils.append(pepperAsBytes, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n\n\nThe provided chunk contains the implementation of the `hash` method in the `AbstractHashingFunction` class, which is responsible for hashing a plaintext password with an optional salt and pepper.", "metadata": {"chunk_id": "doc_42_chunk_3", "original_index": 3, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_3"}, "type": "Document"} +{"page_content": " /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 0.1.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n\n\nThe provided chunk is a part of the `AbstractHashingFunction` class, which is an abstract class that implements the `HashingFunction` interface. The chunk overrides the `check` method, which is used to verify if a given plaintext password matches the provided hash and salt.", "metadata": {"chunk_id": "doc_42_chunk_4", "original_index": 4, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_4"}, "type": "Document"} +{"page_content": " /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password as bytes array\n * @param hashed the hash as bytes array\n * @param salt the salt as bytes array used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n\n\nThe provided chunk is a part of the `AbstractHashingFunction` class, which is an abstract class that implements the `HashingFunction` interface. The chunk overrides the `check` method to compare a plaintext password (as a byte array) with a hashed password (also as a byte array), without the need for a manually provided salt.", "metadata": {"chunk_id": "doc_42_chunk_5", "original_index": 5, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_5"}, "type": "Document"} +{"page_content": " /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.5.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt, CharSequence pepper)\n {\n return check(Utils.append(pepper, plainTextPassword), hashed, salt);\n }\n\n\n\nThe provided chunk is a method implementation within the `AbstractHashingFunction` class, which is part of the `com.password4j` package. This method is responsible for checking if a given plaintext password, when combined with a provided pepper, matches the stored hash and salt.", "metadata": {"chunk_id": "doc_42_chunk_6", "original_index": 6, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_6"}, "type": "Document"} +{"page_content": " /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n return check(Utils.append(pepperAsBytes, plainTextPassword), hashed, salt);\n }\n\n}\n\n\nThis chunk is part of the `AbstractHashingFunction` class, which is an abstract class that provides common functionality for hashing functions. 
The `check` method in this chunk is an overloaded version that takes a byte array for the plaintext password, the hash, the salt, and a pepper, and calls the `check` method that takes the plaintext password as a `CharSequence`, the hash, and the salt.", "metadata": {"chunk_id": "doc_42_chunk_7", "original_index": 7, "pid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_7"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2023 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\nimport com.password4j.types.Argon2;\nimport com.password4j.types.Bcrypt;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\n\npublic class BalloonHashingFunctionTest\n{\n\n\n\nThis chunk is the beginning of a Java test class named `BalloonHashingFunctionTest` that tests the functionality of the `BalloonHashingFunction` class, which is part of the `com.password4j` package.", "metadata": {"chunk_id": "doc_43_chunk_0", "original_index": 0, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_0"}, "type": "Document"} +{"page_content": " private static final Object[][] TEST_VECTORS = new Object[][]{\n // Single thread\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 0, 3, \"716043dff777b44aa7b88dcbab12c078abecfac9d289c5b5195967aa63440dfb\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 0, 3, \"5f02f8206f9cd212485c6bdf85527b698956701ad0852106f94b94ee94577378\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 0, 3, \"20aa99d7fe3f4df4bd98c655c5480ec98b143107a331fd491deda885c4d6a6cc\"},\n new Object[]{\"\\0\", \"\\0\", \"SHA-256\", 3, 3, 0, 3, \"4fc7e302ffa29ae0eac31166cee7a552d1d71135f4e0da66486fb68a749b73a4\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 0, 3, \"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n\n\nThis chunk contains test vectors for the BalloonHashingFunction class, which is a part of the Password4j library. The test vectors cover various input parameters and expected output hashes for the hashing function, including single-threaded and multi-threaded scenarios.", "metadata": {"chunk_id": "doc_43_chunk_1", "original_index": 1, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_1"}, "type": "Document"} +{"page_content": " // Multiple threads\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 4, 3, \"1832bd8e5cbeba1cb174a13838095e7e66508e9bf04c40178990adbc8ba9eb6f\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 2, 3, \"f8767fe04059cef67b4427cda99bf8bcdd983959dbd399a5e63ea04523716c23\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 3, 3, \"bcad257eff3d1090b50276514857e60db5d0ec484129013ef3c88f7d36e438d6\"},\n\n\nThe provided chunk contains test vectors for the BalloonHashingFunction class, specifically testing the behavior of the function with multiple threads. 
These test vectors cover various input parameters and expected output hashes.", "metadata": {"chunk_id": "doc_43_chunk_2", "original_index": 2, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_2"}, "type": "Document"} +{"page_content": " new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 1, 3, \"498344ee9d31baf82cc93ebb3874fe0b76e164302c1cefa1b63a90a69afb9b4d\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 4, 3, \"8a665611e40710ba1fd78c181549c750f17c12e423c11930ce997f04c7153e0c\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 1, 3, \"d9e33c683451b21fb3720afbd78bf12518c1d4401fa39f054b052a145c968bb1\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 16, 3, \"a67b383bb88a282aef595d98697f90820adf64582a4b3627c76b7da3d8bae915\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 1, 3, \"97a11df9382a788c781929831d409d3599e0b67ab452ef834718114efdcd1c6d\"},\n\n\n\nThis chunk contains test vectors for the BalloonHashingFunction class, which is a part of the Password4j library. The test vectors cover various configurations of the hashing function, including different input parameters, number of threads, and expected output hashes.", "metadata": {"chunk_id": "doc_43_chunk_3", "original_index": 3, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_3"}, "type": "Document"} +{"page_content": " };\n\n\n @Test\n public void test()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = new BalloonHashingFunction((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n }\n\n }\n\n @Test\n public void testInstance()\n {\n\n\n\nThe provided chunk is a part of a test suite for the `BalloonHashingFunction` class, which is a hashing function implementation. 
The chunk includes two test methods, `test()` and `testInstance()`, that verify the correctness of the hashing function's behavior.", "metadata": {"chunk_id": "doc_43_chunk_4", "original_index": 4, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_4"}, "type": "Document"} +{"page_content": " BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = BalloonHashingFunction.getInstance((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash(((String) testVector[0]).getBytes(), ((String) testVector[1]).getBytes()).getResult());\n\n\n\nThe provided chunk is part of a test suite for the `BalloonHashingFunction` class, which is used to test the functionality of the class by verifying the correctness of the hashing and checking operations.", "metadata": {"chunk_id": "doc_43_chunk_5", "original_index": 5, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_5"}, "type": "Document"} +{"page_content": " Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n Assert.assertTrue(balloonHashingFunction.check(((String) testVector[0]).getBytes(), ((String) testVector[7]).getBytes(), ((String) testVector[1]).getBytes()));\n }\n\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String m = \"SHA-256\";\n int i = 2;\n int p = 3;\n int l = 4;\n int v = 5;\n BalloonHashingFunction balloonHashingFunction = BalloonHashingFunction.getInstance(m, i, p, l, v);\n\n\n\nThe provided chunk is part of a test suite for the `BalloonHashingFunction` class, which is a hashing function implementation. 
The chunk includes tests for checking the correctness of the `check()` method, which verifies if a given password matches the stored hash, and tests for checking the equality and hashCode methods of the `BalloonHashingFunction` class.", "metadata": {"chunk_id": "doc_43_chunk_6", "original_index": 6, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_6"}, "type": "Document"} +{"page_content": " // THEN\n boolean eqNull = balloonHashingFunction.equals(null);\n boolean eqClass = balloonHashingFunction.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = balloonHashingFunction.equals(BalloonHashingFunction.getInstance(m, i, p, l, v));\n boolean sameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v));\n String toString = balloonHashingFunction.toString();\n int hashCode = balloonHashingFunction.hashCode();\n boolean notSameInst1 = balloonHashingFunction.equals(new BalloonHashingFunction(\"SHA-512\", i, p, l, v));\n\n\nThe provided chunk is part of a test suite for the `BalloonHashingFunction` class, which is testing the equality, `toString()`, and `hashCode()` methods of the class.", "metadata": {"chunk_id": "doc_43_chunk_7", "original_index": 7, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_7"}, "type": "Document"} +{"page_content": " boolean notSameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i+1, p, l, v));\n boolean notSameInst3 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p+1, l, v));\n boolean notSameInst4 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l+1, v));\n boolean notSameInst6 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v+1));\n\n\n\nThe provided chunk is part of a test suite for the `BalloonHashingFunction` class, which is testing the equality and hashCode methods of the class. 
The chunk specifically checks that the `equals` method correctly identifies instances of the `BalloonHashingFunction` class that have different parameter values as not being equal.", "metadata": {"chunk_id": "doc_43_chunk_8", "original_index": 8, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_8"}, "type": "Document"} +{"page_content": " // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new BalloonHashingFunction(m, i+1, p, l, v).toString());\n Assert.assertNotEquals(hashCode, new BalloonHashingFunction(m, i, p, l, v+1).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n Assert.assertFalse(notSameInst3);\n Assert.assertFalse(notSameInst4);\n Assert.assertFalse(notSameInst6);\n }\n\n}\n\n\nThis chunk is part of a unit test for the `BalloonHashingFunction` class, which is testing the equality and hashCode methods of the class.", "metadata": {"chunk_id": "doc_43_chunk_9", "original_index": 9, "pid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_9"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\n\n\nclass Blake2b\n{\n private static final long[] IV = { 0x6a09e667f3bcc908L, 0xbb67ae8584caa73bL, 0x3c6ef372fe94f82bL, 0xa54ff53a5f1d36f1L,\n 0x510e527fade682d1L, 0x9b05688c2b3e6c1fL, 0x1f83d9abfb41bd6bL, 0x5be0cd19137e2179L };\n\n\n\nThis chunk appears to be the beginning of a Java class named `Blake2b` that implements the BLAKE2b cryptographic hash function. 
The class is part of the `com.password4j` package and includes the copyright notice and license information for the Password4j library.", "metadata": {"chunk_id": "doc_44_chunk_0", "original_index": 0, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_0"}, "type": "Document"} +{"page_content": " private static final byte[][] SIGMA = { { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 }, { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },\n { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 }, { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },\n { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 }, { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },\n { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 }, { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },\n { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 } };\n\n\n\nThe chunk represents the SIGMA constant used in the BLAKE2b hash function implementation within the overall document.", "metadata": {"chunk_id": "doc_44_chunk_1", "original_index": 1, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_1"}, "type": "Document"} +{"page_content": " private static final int ROUNDS = 12;\n\n private static final int BLOCK_LENGTH_BYTES = 128;\n\n private final int digestLength;\n\n private final int keyLength;\n\n private final byte[] buffer;\n\n private final long[] internalState = new long[16];\n\n private int bufferPos = 0;\n\n private long[] chainValue = null;\n\n private long t0 = 0L;\n\n private long t1 = 0L;\n\n private long f0 = 0L;\n\n\n\nThis chunk defines the constants and fields used in the Blake2b class, which is an implementation of the BLAKE2b cryptographic hash function.", "metadata": {"chunk_id": "doc_44_chunk_2", "original_index": 2, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_2"}, "type": "Document"} +{"page_content": " /**\n * Basic sized constructor - size in bytes.\n *\n * @param digestSize size of the digest in bytes\n */\n Blake2b(int digestSize)\n {\n if (digestSize < 1 || digestSize > 64)\n {\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n }\n\n buffer = new byte[BLOCK_LENGTH_BYTES];\n keyLength = 0;\n this.digestLength = digestSize;\n init();\n }\n\n\n\nThe provided chunk is the constructor for the `Blake2b` class, which is responsible for initializing the BLAKE2b hash function with a specified digest size.", "metadata": {"chunk_id": "doc_44_chunk_3", "original_index": 3, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_3"}, "type": "Document"} +{"page_content": " // initialize chainValue\n private void init()\n {\n chainValue = new long[8];\n chainValue[0] = IV[0] ^ (digestLength | ((long) keyLength << 8) | 0x1010000);\n chainValue[1] = IV[1];\n chainValue[2] = IV[2];\n chainValue[3] = IV[3];\n chainValue[4] = IV[4];\n chainValue[5] = IV[5];\n chainValue[6] = IV[6];\n chainValue[7] = IV[7];\n }\n\n\n\nThe provided chunk is a private method named `init()` that initializes the `chainValue` array, which is a crucial part of the BLAKE2b hash function implementation.", "metadata": {"chunk_id": "doc_44_chunk_4", "original_index": 4, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_4"}, "type": "Document"} +{"page_content": " private void 
initializeInternalState()\n {\n System.arraycopy(chainValue, 0, internalState, 0, chainValue.length);\n System.arraycopy(IV, 0, internalState, chainValue.length, 4);\n internalState[12] = t0 ^ IV[4];\n internalState[13] = t1 ^ IV[5];\n internalState[14] = f0 ^ IV[6];\n internalState[15] = IV[7];// ^ f1 with f1 = 0\n }\n\n\n\nThe provided chunk is a private method named `initializeInternalState()` that is part of the `Blake2b` class. This method is responsible for initializing the internal state of the BLAKE2b hash function, which is a cryptographic hash algorithm used for secure hashing of data.", "metadata": {"chunk_id": "doc_44_chunk_5", "original_index": 5, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_5"}, "type": "Document"} +{"page_content": " void update(byte[] message)\n {\n if (message == null)\n {\n return;\n }\n update(message, 0, message.length);\n }\n\n /**\n * update the message digest with a block of bytes.\n *\n * @param message the byte array containing the data.\n * @param offset the offset into the byte array where the data starts.\n * @param len the length of the data.\n */\n void update(byte[] message, int offset, int len)\n {\n int remainingLength = 0;\n\n\n\nThe provided chunk is part of the `Blake2b` class, which is an implementation of the BLAKE2b cryptographic hash function. The `update()` methods are responsible for updating the internal state of the hash function with the provided message data.", "metadata": {"chunk_id": "doc_44_chunk_6", "original_index": 6, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_6"}, "type": "Document"} +{"page_content": " if (bufferPos != 0)\n {\n remainingLength = BLOCK_LENGTH_BYTES - bufferPos;\n if (remainingLength < len)\n {\n System.arraycopy(message, offset, buffer, bufferPos, remainingLength);\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n bufferPos = 0;\n Arrays.fill(buffer, (byte) 0);// clear buffer\n }\n else\n {\n System.arraycopy(message, offset, buffer, bufferPos, len);\n bufferPos += len;\n return;\n }\n }\n\n\n\nThe provided chunk is part of the `update()` method in the `Blake2b` class, which is responsible for updating the message digest with a block of bytes.", "metadata": {"chunk_id": "doc_44_chunk_7", "original_index": 7, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_7"}, "type": "Document"} +{"page_content": " int messagePos;\n int blockWiseLastPos = offset + len - BLOCK_LENGTH_BYTES;\n for (messagePos = offset + remainingLength; messagePos < blockWiseLastPos; messagePos += BLOCK_LENGTH_BYTES)\n {\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(message, messagePos);\n }\n\n\n\nThe provided chunk is part of the `update()` method in the `Blake2b` class, which is responsible for updating the message digest with a block of bytes. The chunk iterates over the input message in block-wise fashion, updating the internal state and compressing the data.", "metadata": {"chunk_id": "doc_44_chunk_8", "original_index": 8, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_8"}, "type": "Document"} +{"page_content": " // fill the buffer with left bytes, this might be a full block\n System.arraycopy(message, messagePos, buffer, 0, offset + len - messagePos);\n bufferPos += offset + len - messagePos;\n }\n\n /**\n * close the digest, producing the final digest value. 
The doFinal\n * call leaves the digest reset.\n * Key, salt and personal string remain.\n *\n * @param out the array the digest is to be copied into.\n * @param outOffset the offset into the out array the digest is to start at.\n */\n void doFinal(byte[] out, int outOffset)\n {\n\n\n\nThe provided chunk is part of the `Blake2b` class, which is an implementation of the BLAKE2b cryptographic hash function. The chunk is responsible for finalizing the hash computation and copying the resulting digest into the provided output array.", "metadata": {"chunk_id": "doc_44_chunk_9", "original_index": 9, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_9"}, "type": "Document"} +{"page_content": " f0 = 0xFFFFFFFFFFFFFFFFL;\n t0 += bufferPos;\n if (bufferPos > 0 && t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n Arrays.fill(buffer, (byte) 0);// Holds eventually the key if input is null\n Arrays.fill(internalState, 0L);\n\n for (int i = 0; i < chainValue.length && (i * 8 < digestLength); i++)\n {\n byte[] bytes = Utils.longToLittleEndian(chainValue[i]);\n\n\n\nThis chunk is part of the `doFinal()` method in the `Blake2b` class, which is responsible for finalizing the digest and producing the final digest value.", "metadata": {"chunk_id": "doc_44_chunk_10", "original_index": 10, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_10"}, "type": "Document"} +{"page_content": " if (i * 8 < digestLength - 8)\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, 8);\n }\n else\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, digestLength - (i * 8));\n }\n }\n\n Arrays.fill(chainValue, 0L);\n\n reset();\n }\n\n\n\nThis chunk is part of the `doFinal()` method of the `Blake2b` class, which is responsible for finalizing the digest and copying the result into the provided output array. It handles the case where the digest length is not a multiple of 8 bytes, ensuring that the last few bytes are correctly copied.", "metadata": {"chunk_id": "doc_44_chunk_11", "original_index": 11, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_11"}, "type": "Document"} +{"page_content": " /**\n * Reset the digest back to it's initial state.\n * The key, the salt and the personal string will\n * remain for further computations.\n */\n void reset()\n {\n bufferPos = 0;\n f0 = 0L;\n t0 = 0L;\n t1 = 0L;\n chainValue = null;\n Arrays.fill(buffer, (byte) 0);\n init();\n }\n\n\n\nThe provided chunk is a method named `reset()` that resets the internal state of the `Blake2b` class, which is a Java implementation of the BLAKE2b cryptographic hash function. 
This method is part of the overall `Blake2b` class implementation.", "metadata": {"chunk_id": "doc_44_chunk_12", "original_index": 12, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_12"}, "type": "Document"} +{"page_content": " private void compress(byte[] message, int messagePos)\n {\n\n initializeInternalState();\n\n long[] m = new long[16];\n for (int j = 0; j < 16; j++)\n {\n m[j] = Utils.littleEndianToLong(message, messagePos + j * 8);\n }\n\n for (int round = 0; round < ROUNDS; round++)\n {\n\n\n\nThe provided chunk is a part of the `compress()` method within the `Blake2b` class, which is responsible for the core compression function of the BLAKE2b cryptographic hash algorithm.", "metadata": {"chunk_id": "doc_44_chunk_13", "original_index": 13, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_13"}, "type": "Document"} +{"page_content": " // G apply to columns of internalState:m[blake2b_sigma[round][2 *\n // blockPos]] /+1\n functionG(m[SIGMA[round][0]], m[SIGMA[round][1]], 0, 4, 8, 12);\n functionG(m[SIGMA[round][2]], m[SIGMA[round][3]], 1, 5, 9, 13);\n functionG(m[SIGMA[round][4]], m[SIGMA[round][5]], 2, 6, 10, 14);\n functionG(m[SIGMA[round][6]], m[SIGMA[round][7]], 3, 7, 11, 15);\n // G apply to diagonals of internalState:\n functionG(m[SIGMA[round][8]], m[SIGMA[round][9]], 0, 5, 10, 15);\n functionG(m[SIGMA[round][10]], m[SIGMA[round][11]], 1, 6, 11, 12);\n functionG(m[SIGMA[round][12]], m[SIGMA[round][13]], 2, 7, 8, 13);\n functionG(m[SIGMA[round][14]], m[SIGMA[round][15]], 3, 4, 9, 14);\n }\n\n\n\nThe provided chunk is part of the `compress()` method of the `Blake2b` class, which is responsible for the core compression function of the BLAKE2b cryptographic hash algorithm. This chunk applies the `functionG()` operation to the internal state of the algorithm, which is a key step in the BLAKE2b compression process.", "metadata": {"chunk_id": "doc_44_chunk_14", "original_index": 14, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_14"}, "type": "Document"} +{"page_content": " // update chain values:\n for (int offset = 0; offset < chainValue.length; offset++)\n {\n chainValue[offset] = chainValue[offset] ^ internalState[offset] ^ internalState[offset + 8];\n }\n }\n\n private void functionG(long m1, long m2, int posA, int posB, int posC, int posD)\n {\n\n\n\nThe provided chunk is part of the `compress()` method in the `Blake2b` class, which is responsible for updating the internal state of the BLAKE2b hash function. 
The `compress()` method is called during the `update()` and `doFinal()` methods to process the input data and update the hash value.", "metadata": {"chunk_id": "doc_44_chunk_15", "original_index": 15, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_15"}, "type": "Document"} +{"page_content": " internalState[posA] = internalState[posA] + internalState[posB] + m1;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 32);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 24); // replaces 25 of BLAKE\n internalState[posA] = internalState[posA] + internalState[posB] + m2;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 16);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 63); // replaces 11 of BLAKE\n }\n}\n\n\nThe provided chunk is part of the `functionG()` method within the `Blake2b` class, which is responsible for updating the internal state of the BLAKE2b hash function during the compression step of the algorithm.", "metadata": {"chunk_id": "doc_44_chunk_16", "original_index": 16, "pid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_16"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n\n\nThe provided chunk is the copyright notice and package declaration for the `BadParametersException` class in the `com.password4j` package.", "metadata": {"chunk_id": "doc_45_chunk_0", "original_index": 0, "pid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_0"}, "type": "Document"} +{"page_content": "/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n\n\nThis chunk defines a custom exception class called `BadParametersException` that extends `IllegalArgumentException`. The exception is used to handle cases where a function is called with invalid or malformed parameters.", "metadata": {"chunk_id": "doc_45_chunk_1", "original_index": 1, "pid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_1"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\nimport java.util.Objects;\n\n\n\nThis chunk contains the package declaration and import statements for the `com.password4j` package, which likely includes the definition of the `Hash` class.", "metadata": {"chunk_id": "doc_46_chunk_0", "original_index": 0, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_0"}, "type": "Document"} +{"page_content": "\n/**\n * This class contains all the information computed after\n * calculating a cryptographic hash.\n *
<p>
\n * The same {@link HashingFunction} used to generate the hash\n * is used to verify the plain password; in addition cryptographic\n * seasoning such as salt and pepper are stored in this object.\n *
<p>
\n * A hash is the product of a one-way function that maps data of arbitrary size to\n * fixed-size values; it is called hashing function (HF).\n * This class represent hashes generated by cryptographic hash function (CHF),\n * where each function has the following properties:\n *
<ul>\n *     <li>it is deterministic, meaning that the same message always results in the same hash</li>\n *     <li>it is quick to compute the hash value for any given message</li>\n *     <li>it is infeasible to generate a message that yields a given hash value</li>\n *     <li>it is infeasible to find two different messages with the same hash value</li>\n\n\nThe chunk describes the purpose and properties of the Hash class, which represents the information computed after calculating a cryptographic hash.", "metadata": {"chunk_id": "doc_46_chunk_1", "original_index": 1, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_1"}, "type": "Document"}
+{"page_content": " *
    <li>a small change to a message should change the hash value so extensively that the new hash value\n * appears uncorrelated with the old hash value</li>\n * </ul>\n * <p>
\n * A salt is a unique, randomly generated string that is added to each password as part of the hashing process.\n * As the salt is unique for every user, an attacker has to crack hashes one at a time using the respective salt,\n * rather than being able to calculate a hash once and compare it against every stored hash.\n *
<p>
\n\n\nThis chunk describes the properties of cryptographic hash functions and the purpose of using salts in password hashing.", "metadata": {"chunk_id": "doc_46_chunk_2", "original_index": 2, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_2"}, "type": "Document"} +{"page_content": " * A pepper can be used in additional to salting to provide an additional layer of protection.\n * It is similar to a salt, but has two key differences:\n *
<ul>\n *     <li>The pepper is shared between all stored passwords, rather than being unique like a salt.</li>\n *     <li>The pepper is not stored in the database, unlike the salts.</li>\n * </ul>
\n *\n * @author David Bertoldi\n * @see
OWASP Password Storage Cheat Sheet\n * @see Key derivation function\n * @see Cryptographic hash function\n * @since 0.1.0\n */\npublic class Hash\n{\n\n\n\nThis chunk describes the concept of a \"pepper\" and how it is used in addition to salting to provide an additional layer of protection for password storage. It is part of the documentation for the \"Hash\" class, which represents the information computed after calculating a cryptographic hash.", "metadata": {"chunk_id": "doc_46_chunk_3", "original_index": 3, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_3"}, "type": "Document"} +{"page_content": " /**\n * Represents the full output of a cryptographic hashing function.\n * Depending on the implementation of the CHF, it may contain\n * the salt and the configuration of the CHF itself.\n */\n private byte[] result;\n\n /**\n * Represents the computed output of a cryptographic hashing function.\n * It never contains salt and other configurations.\n */\n private byte[] bytes;\n\n /**\n * Represents the salt: random data that is used as an additional input\n * to a cryptographic hashing function.\n */\n private byte[] salt;\n\n\n\nThe chunk represents the private fields of the `Hash` class, which stores the output of a cryptographic hashing function, the salt used in the hashing process, and the computed hash without additional information.", "metadata": {"chunk_id": "doc_46_chunk_4", "original_index": 4, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_4"}, "type": "Document"} +{"page_content": " /**\n * Represents the pepper: a secret added to the input password\n * prior to being hashed with a cryptographic hash function\n */\n private CharSequence pepper;\n\n /**\n * Represents the hashing function used to generate this object.\n *\n * @see HashingFunction for more details\n */\n private HashingFunction hashingFunction;\n\n /**\n * It is meant to not be used if not internally.\n * The other constructor must be used instead.\n *\n * @see Hash#Hash(HashingFunction, String, byte[], String)\n * @since 0.1.0\n */\n @SuppressWarnings(\"unused\")\n private Hash()\n {\n //\n }\n\n\n\nThis chunk represents the private member variables and a private constructor of the `Hash` class, which is used to store the information computed after calculating a cryptographic hash.", "metadata": {"chunk_id": "doc_46_chunk_5", "original_index": 5, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_5"}, "type": "Document"} +{"page_content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n\n\nThe chunk describes the constructor of the `Hash` class, which is responsible for creating an instance of the `Hash` object that contains the information used and produced by the process of hashing a password, including the cryptographic algorithm, the hash result, the hash bytes, and the salt.", "metadata": {"chunk_id": "doc_46_chunk_6", "original_index": 6, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_6"}, "type": "Document"} +{"page_content": " * @deprecated As of 1.8.1 because of the salt conversion from {@link String} to byte[].\n * {@link Hash#Hash(HashingFunction, String, byte[], byte[])} should be used instead.\n */\n @Deprecated\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, String salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, Utils.fromCharSequenceToBytes(salt));\n }\n\n\n\n\n\nThis chunk is part of the `Hash` class, which represents the information computed after calculating a cryptographic hash. The chunk contains a deprecated constructor that takes a `String` for the salt, and a new constructor that takes a `byte[]` for the salt, as the salt representation has been changed from `String` to `byte[]` in version 1.8.1 of the library.", "metadata": {"chunk_id": "doc_46_chunk_7", "original_index": 7, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_7"}, "type": "Document"} +{"page_content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n */\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, byte[] salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, salt);\n }\n\n\n\nThis chunk is part of the `Hash` class, which represents the information computed after calculating a cryptographic hash. The constructor it defines is responsible for initializing the `Hash` object with the necessary information, including the hashing function, the hash result, the hash bytes, and the salt used in the hashing process.", "metadata": {"chunk_id": "doc_46_chunk_8", "original_index": 8, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_8"}, "type": "Document"} +{"page_content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash as bytes array.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n\n\nThis chunk describes the constructor of the `Hash` class, which is responsible for creating an instance of the `Hash` object that contains the basic information used and produced by the computational process of hashing a password.", "metadata": {"chunk_id": "doc_46_chunk_9", "original_index": 9, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_9"}, "type": "Document"} +{"page_content": " * @param salt the salt used for the computation as bytes array.\n * @since 1.7.0\n */\n public Hash(HashingFunction hashingFunction, byte[] result, byte[] bytes, byte[] salt)\n {\n this.hashingFunction = hashingFunction;\n this.salt = salt;\n this.result = result;\n this.bytes = bytes;\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public String getResult()\n {\n return Utils.fromBytesToString(result);\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public byte[] getResultAsBytes()\n {\n return result;\n }\n\n /**\n * Retrieves the hash as byte array and without the parameters\n * encoded in the final hash.\n *\n * @return the hash.\n * @since 1.5.1\n */\n public byte[] getBytes()\n {\n return bytes;\n }\n\n\n\nThis chunk contains the constructor and getter methods for the `Hash` class, which is used to store the information computed after calculating a cryptographic hash, including the hash result, salt, and hashing function used.", "metadata": {"chunk_id": "doc_46_chunk_10", "original_index": 10, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_10"}, "type": "Document"} +{"page_content": " /**\n * Retrieves the {@link HashingFunction} used\n * to hash the password.\n *\n * @return the CHF\n * @since 0.4.0\n */\n public HashingFunction getHashingFunction()\n {\n return hashingFunction;\n }\n\n /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as {@link String}.\n * @since 0.1.0\n */\n public String getSalt()\n {\n return Utils.fromBytesToString(salt);\n }\n\n\n\nThe chunk contains methods that retrieve the hashing function and salt used to generate the hash, which are important properties of the Hash class.", "metadata": {"chunk_id": "doc_46_chunk_11", "original_index": 11, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_11"}, "type": "Document"} +{"page_content": " /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as bytes array.\n * @since 1.7.0\n */\n public byte[] getSaltBytes()\n {\n return salt;\n }\n\n /**\n * Retrieves the pepper used with the password in the hashing function.\n *\n * @return the pepper.\n * @since 0.1.0\n */\n public CharSequence getPepper()\n {\n return pepper;\n }\n\n /**\n * Stores the pepper used together with the password in the hashing function.\n *
<p>
\n * This methods should be used just after the creation of this object.\n *\n * @param pepper the pepper used.\n * @since 0.1.0\n */\n void setPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n }\n\n\n\nThis chunk of code defines methods to retrieve the salt and pepper used in the hashing function, as well as a method to set the pepper. It is part of the `Hash` class, which represents the information computed after calculating a cryptographic hash.", "metadata": {"chunk_id": "doc_46_chunk_12", "original_index": 12, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_12"}, "type": "Document"} +{"page_content": " /**\n * Produces a human-readable description of the {@link Hash}.\n *\n * @return a readable version of this object\n * @since 0.1.0\n */\n @Override\n public String toString()\n {\n StringBuilder sb = new StringBuilder();\n if (this.hashingFunction != null)\n {\n sb.append(hashingFunction.getClass().getSimpleName());\n }\n sb.append(\"[salt=\").append(getSalt()).append(\", pepper=\").append(getPepper()).append(\", hash=\").append(getResult())\n .append(\"]\");\n return sb.toString();\n }\n\n\n\nThe chunk represents the implementation of the `toString()` method in the `Hash` class, which provides a human-readable description of the hash object, including the hashing function, salt, pepper, and the computed hash.", "metadata": {"chunk_id": "doc_46_chunk_13", "original_index": 13, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_13"}, "type": "Document"} +{"page_content": " /**\n * Two {@link Hash}es are considered equals if they contain\n * the same hash, salt, pepper and they are generated with\n * the same {@link HashingFunction}\n *\n * @param obj the object to compare\n * @return true if equals\n * @since 0.1.0\n */\n @Override\n public boolean equals(Object obj)\n {\n if (obj == null || !this.getClass().equals(obj.getClass()))\n {\n return false;\n }\n\n\n\nThe chunk represents the implementation of the `equals()` method in the `Hash` class, which compares two `Hash` objects to determine if they are equal based on their hash, salt, pepper, and hashing function.", "metadata": {"chunk_id": "doc_46_chunk_14", "original_index": 14, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_14"}, "type": "Document"} +{"page_content": " Hash otherHash = (Hash) obj;\n return hasSameValues(otherHash);\n }\n\n private boolean hasSameValues(Hash otherHash)\n {\n return Arrays.equals(this.result, otherHash.result) //\n && Arrays.equals(this.bytes, otherHash.bytes) //\n && Arrays.equals(this.salt, otherHash.salt) //\n && areEquals(this.pepper, otherHash.pepper) //\n && this.hashingFunction.equals(otherHash.hashingFunction);\n }\n\n private static boolean areEquals(CharSequence cs1, CharSequence cs2)\n {\n if (cs1 == cs2)\n {\n return true;\n }\n else if (cs1 != null && cs2 != null)\n {\n return cs1.equals(cs2);\n }\n return false;\n }\n\n @Override\n public int hashCode()\n {\n return Objects.hash(Arrays.hashCode(result), Arrays.hashCode(salt), pepper, hashingFunction);\n }\n}\n\n\nThe chunk is part of the `Hash` class, which represents the information computed after calculating a cryptographic hash. 
The `equals()` and `hashCode()` methods are implemented to compare and hash `Hash` objects based on their properties, such as the hash result, salt, pepper, and hashing function.", "metadata": {"chunk_id": "doc_46_chunk_15", "original_index": 15, "pid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_15"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n\n\nThis chunk contains the copyright notice and package declaration for the `com.password4j` package, which is part of the Password4j library.", "metadata": {"chunk_id": "doc_47_chunk_0", "original_index": 0, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_0"}, "type": "Document"} +{"page_content": "/**\n * Builder class that helps to create a chain of parameters to be used\n * in the hashing process.\n *\n * @author David Bertoldi\n * @since 1.0.0\n */\npublic class HashBuilder\n{\n private byte[] plainTextPassword;\n\n protected byte[] salt;\n\n protected CharSequence pepper;\n\n @SuppressWarnings(\"unused\")\n private HashBuilder()\n {\n //\n }\n\n /**\n * @param plainTextPassword the plain text password\n * @since 1.0.0\n */\n protected HashBuilder(CharSequence plainTextPassword)\n {\n this.plainTextPassword = Utils.fromCharSequenceToBytes(plainTextPassword);\n }\n\n /**\n * @param plainTextPasswordAsBytes the plain text password as bytes array\n * @since 1.7.0\n */\n protected HashBuilder(byte[] plainTextPasswordAsBytes)\n {\n this.plainTextPassword = plainTextPasswordAsBytes;\n }\n\n\n\nThe provided chunk is the beginning of the HashBuilder class, which is a builder class that helps create a chain of parameters to be used in the hashing process. This class is part of the Password4j library, which is a Java library for password hashing and verification.", "metadata": {"chunk_id": "doc_47_chunk_1", "original_index": 1, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_1"}, "type": "Document"} +{"page_content": " /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param salt cryptographic salt\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addSalt(String salt)\n {\n this.salt = Utils.fromCharSequenceToBytes(salt);\n return this;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param saltAsBytes cryptographic salt as bytes array\n * @return this builder\n * @since 1.7.0\n */\n public HashBuilder addSalt(byte[] saltAsBytes)\n {\n this.salt = saltAsBytes;\n return this;\n }\n\n\n\nThe provided chunk is part of the `HashBuilder` class, which is responsible for creating a chain of parameters to be used in the hashing process. 
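A sketch for orientation, not part of the dataset file: the fluent `HashBuilder` chain documented by the surrounding doc_47 chunks composes as below. Class and method names (`Password.hash`, `addRandomSalt`, `addPepper`, `withPBKDF2`, `Password.check`) are taken from the chunk contents; the password and pepper values are invented for illustration.

    import com.password4j.Hash;
    import com.password4j.Password;

    class SaltPepperSketch {
        static boolean hashAndVerify() {
            Hash hash = Password.hash("myPassword")
                    .addRandomSalt(16)           // BadParametersException for non-positive lengths
                    .addPepper("shared-pepper")  // processed as pepper+password
                    .withPBKDF2();               // reads psw4j.properties, else default parameters
            return Password.check("myPassword", hash);
        }
    }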
The chunk specifically deals with adding a cryptographic salt to the hashing process, which is an important step in password hashing to increase the security of the process.", "metadata": {"chunk_id": "doc_47_chunk_2", "original_index": 2, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_2"}, "type": "Document"} +{"page_content": " /**\n * Add a random cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *
<p>
\n * Calling this method can be omitted for all the CHFs that require a salt.\n *\n * @return this builder\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt()\n {\n this.salt = SaltGenerator.generate();\n return this;\n }\n\n\n\nThe chunk is part of the `HashBuilder` class, which is responsible for creating a chain of parameters to be used in the hashing process. The `addRandomSalt()` method adds a random cryptographic salt to the hashing process, which is applied differently depending on the chosen algorithm.", "metadata": {"chunk_id": "doc_47_chunk_3", "original_index": 3, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_3"}, "type": "Document"} +{"page_content": " /**\n * Add a random cryptographic salt in the hashing process with a given length.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param length the length of the salt produced\n * @return this builder\n * @throws BadParametersException if the length is non-positive\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt(int length)\n {\n if (length <= 0)\n {\n throw new BadParametersException(\"Salt cannot have a non-positive length\");\n }\n else\n {\n this.salt = SaltGenerator.generate(length);\n }\n return this;\n }\n\n\n\nThe chunk is a method within the `HashBuilder` class, which is responsible for building the parameters used in the hashing process. The `addRandomSalt` method allows the user to add a random cryptographic salt with a specified length to the hashing process.", "metadata": {"chunk_id": "doc_47_chunk_4", "original_index": 4, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_4"}, "type": "Document"} +{"page_content": " /**\n * Concatenates the pepper configured in your `psw4j.properties` file with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @return this builder\n * @see PepperGenerator#get()\n */\n public HashBuilder addPepper()\n {\n this.pepper = PepperGenerator.get();\n return this;\n }\n\n /**\n * Concatenates the provided string with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @param pepper cryptographic pepper\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n return this;\n }\n\n\n\nThe provided chunk is part of the `HashBuilder` class, which is responsible for creating a chain of parameters to be used in the hashing process. The chunk specifically deals with the addition of a cryptographic pepper to the plain text password, either by using the pepper configured in the `psw4j.properties` file or by providing a custom pepper.", "metadata": {"chunk_id": "doc_47_chunk_5", "original_index": 5, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_5"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *
<p>
\n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n\n\nThe chunk is a method within the `HashBuilder` class that hashes the previously given plain text password using a specific implementation of the `HashingFunction` interface. This method does not read the configurations in the `psw4j.properties` file.", "metadata": {"chunk_id": "doc_47_chunk_6", "original_index": 6, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_6"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link PBKDF2Function}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withPBKDF2()\n {\n return with(AlgorithmFinder.getPBKDF2Instance());\n }\n\n\n\nThe chunk is a method within the `HashBuilder` class, which is responsible for building a chain of parameters to be used in the hashing process. The `withPBKDF2()` method specifically hashes the previously given plain text password using the PBKDF2 hashing function, reading the configurations from the `psw4j.properties` file if available, and finally calling the `with(HashingFunction)` method to perform the hashing.", "metadata": {"chunk_id": "doc_47_chunk_7", "original_index": 7, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_7"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link CompressedPBKDF2Function}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getCompressedPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withCompressedPBKDF2()\n {\n return with(AlgorithmFinder.getCompressedPBKDF2Instance());\n }\n\n\n\nThe chunk is a method within the `HashBuilder` class that provides a way to hash a password using the `CompressedPBKDF2Function` algorithm. This method reads the configurations from the `psw4j.properties` file and uses the `AlgorithmFinder` to get an instance of the `CompressedPBKDF2Function` to perform the hashing.", "metadata": {"chunk_id": "doc_47_chunk_8", "original_index": 8, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_8"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link BcryptFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getBcryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withBcrypt()\n {\n return with(AlgorithmFinder.getBcryptInstance());\n }\n\n\n\nThe chunk is a method within the `HashBuilder` class that provides a way to hash a password using the Bcrypt hashing function.", "metadata": {"chunk_id": "doc_47_chunk_9", "original_index": 9, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_9"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link ScryptFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getScryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withScrypt()\n {\n return with(AlgorithmFinder.getScryptInstance());\n }\n\n\n\nThe chunk is a method within the `HashBuilder` class that provides a way to hash a password using the Scrypt hashing function. It reads the configuration from the `psw4j.properties` file and then calls the `with()` method to perform the hashing.", "metadata": {"chunk_id": "doc_47_chunk_10", "original_index": 10, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_10"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link MessageDigestFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.4.0\n */\n public Hash withMessageDigest()\n {\n return with(AlgorithmFinder.getMessageDigestInstance());\n }\n\n\n\nThe chunk is a method within the `HashBuilder` class that provides a way to hash a password using the `MessageDigestFunction` algorithm. This method reads the configurations from the `psw4j.properties` file and calls the `with(HashingFunction)` method to perform the hashing.", "metadata": {"chunk_id": "doc_47_chunk_11", "original_index": 11, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_11"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link Argon2Function}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.5.0\n */\n public Hash withArgon2()\n {\n return with(AlgorithmFinder.getArgon2Instance());\n }\n\n\n\nThe chunk is a method within the `HashBuilder` class that provides a way to hash a password using the Argon2 hashing function. It reads the configuration from the `psw4j.properties` file and calls the `with(HashingFunction)` method to perform the hashing.", "metadata": {"chunk_id": "doc_47_chunk_12", "original_index": 12, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_12"}, "type": "Document"} +{"page_content": " /**\n * Hashes the previously given plain text password\n * with {@link BalloonHashingFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.8.0\n */\n public Hash withBalloonHashing()\n {\n return with(AlgorithmFinder.getBalloonHashingInstance());\n }\n\n}\n\n\nThe chunk is a method within the `HashBuilder` class, which is responsible for building a chain of parameters to be used in the hashing process. The `withBalloonHashing()` method specifically hashes the previously given plain text password using the `BalloonHashingFunction`, reading the configurations from the `psw4j.properties` file if available, and finally calling the `with(HashingFunction)` method.", "metadata": {"chunk_id": "doc_47_chunk_13", "original_index": 13, "pid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_13"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n\n\nThe provided chunk is the header and package declaration of a Java class or interface within the `com.password4j` package. This class or interface is likely part of the Password4j library, which is a Java library for password hashing and verification.", "metadata": {"chunk_id": "doc_48_chunk_0", "original_index": 0, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_0"}, "type": "Document"} +{"page_content": "import com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n\n\nThe provided chunk is a test case for the `MessageDigestFunctionTest` class, which is part of the `com.password4j` package. 
The test case specifically tests the MD5 hashing algorithm implementation.", "metadata": {"chunk_id": "doc_48_chunk_1", "original_index": 1, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_1"}, "type": "Document"} +{"page_content": "\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n\n\nThis chunk tests the behavior of the MessageDigestFunction class, specifically the MD5 hashing algorithm with and without salt. It verifies that the hashed results are different when the salt is prepended versus appended.", "metadata": {"chunk_id": "doc_48_chunk_2", "original_index": 2, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_2"}, "type": "Document"} +{"page_content": " @Test\n public void testMDVariants()\n {\n Set algorithms = AlgorithmFinder.getAllMessageDigests();\n for (String alg : algorithms)\n {\n // GIVEN\n MessageDigestFunction strategy = MessageDigestFunction.getInstance(alg);\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n Hash hashWithSalt = strategy.hash(password, salt);\n\n // THEN\n Assert.assertTrue(strategy.check(password, hash.getResult()));\n Assert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n }\n }\n\n @Test(expected = UnsupportedOperationException.class)\n public void testMDWrongAlgorithm()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"notAnAlgorithm\");\n String password = \"password\";\n String salt = \"abc\";\n\n\n\nThis chunk tests the behavior of the MessageDigestFunction class, which is part of the Password4j library. 
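For orientation only: the salt-placement behaviour exercised by these `MessageDigestFunctionTest` chunks can be reproduced as below. Names come from the chunks; the import paths are assumed from the `com.password4j` package shown there, and the assertion mirrors `testDifferentConcatenations`.

    import com.password4j.Hash;
    import com.password4j.HashingFunction;
    import com.password4j.MessageDigestFunction;
    import com.password4j.SaltOption;

    class SaltOptionSketch {
        static void demo() {
            HashingFunction prepend = MessageDigestFunction.getInstance("MD5", SaltOption.PREPEND);
            HashingFunction append = MessageDigestFunction.getInstance("MD5", SaltOption.APPEND);
            // Same password and salt, opposite concatenation order: different digests.
            Hash h1 = prepend.hash("password", "abc");
            Hash h2 = append.hash("password", "abc");
            assert !h1.getResult().equals(h2.getResult());
        }
    }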
It checks that the function can handle various message digest algorithms, and that it throws an exception when an unsupported algorithm is provided.", "metadata": {"chunk_id": "doc_48_chunk_3", "original_index": 3, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_3"}, "type": "Document"} +{"page_content": " // WHEN\n strategy.hash(password, salt);\n\n // THEN\n }\n\n @Test\n public void testMDWrongSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"1234\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n }\n\n\n\nThis chunk tests the behavior of the MessageDigestFunction class when an unsupported salt option is provided, ensuring that the default salt option (APPEND) is used in such cases.", "metadata": {"chunk_id": "doc_48_chunk_4", "original_index": 4, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_4"}, "type": "Document"} +{"page_content": " @Test\n public void testMDRightSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"prepend\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.PREPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n\n }\n\n\n @Test\n public void testPBKDF2Check()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n\n\nThe chunk tests the behavior of the MessageDigestFunction and CompressedPBKDF2Function classes, including checking the salt option and verifying the correctness of PBKDF2 password hashing.", "metadata": {"chunk_id": "doc_48_chunk_5", "original_index": 5, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_5"}, "type": "Document"} +{"page_content": " // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2WrongCheck2()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String badHash = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n try {\n Assert.assertTrue(strategy.check(userSubmittedPassword, badHash));\n } catch (BadParametersException ex) {\n assertEquals(\"`\" + badHash + \"` is not a valid hash\", ex.getMessage());\n }\n }\n\n\n\nThe chunk is part of a test suite for the Password4j library, specifically testing the functionality of the PBKDF2 hashing algorithm. 
The tests cover checking the validity of a hashed password, handling invalid hashes, and ensuring the coherence of the PBKDF2 implementation.", "metadata": {"chunk_id": "doc_48_chunk_6", "original_index": 6, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_6"}, "type": "Document"} +{"page_content": "\n @Test(expected = BadParametersException.class)\n public void testPBKDF2BadCheck()\n {\n // GIVEN\n String hashed = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n\n }\n\n @Test\n public void testAlgorithmFromCode()\n {\n // GIVEN\n\n // WHEN\n Hmac algNull = Hmac.fromCode(-100);\n for (Hmac enumAlg : Hmac.values())\n {\n Hmac alg = Hmac.fromCode(enumAlg.code());\n\n\n\nThe chunk is part of a test suite for the Password4j library, which provides password hashing and verification functionality. The tests cover various aspects of the library's MessageDigestFunction and PBKDF2Function implementations, including handling of invalid input and retrieving algorithm information.", "metadata": {"chunk_id": "doc_48_chunk_7", "original_index": 7, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_7"}, "type": "Document"} +{"page_content": "\n // THEN\n Assert.assertNotNull(alg);\n assertEquals(enumAlg.code(), alg.code());\n assertEquals(enumAlg.bits(), alg.bits());\n }\n Assert.assertNull(algNull);\n\n\n }\n\n @Test\n public void testPBKDF2Coherence()\n {\n // GIVEN\n String password = \"password\";\n\n // WHEN\n Hash hash = PBKDF2Function.getInstance(Hmac.SHA256, 8_777, 256).hash(password);\n\n // THEN\n Assert.assertTrue(Password.check(password, hash));\n\n }\n\n\n\nThe chunk tests the functionality of the PBKDF2 hashing algorithm, including checking the coherence of the hashed password and the original password.", "metadata": {"chunk_id": "doc_48_chunk_8", "original_index": 8, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_8"}, "type": "Document"} +{"page_content": " @Test\n public void testPBKDF2CheckWithFixedConfigurations()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = new CompressedPBKDF2Function(Hmac.SHA256, 10_000, 256);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2equality()\n {\n // GIVEN\n PBKDF2Function strategy1 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy2 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy3 = PBKDF2Function.getInstance(Hmac.SHA1, 10_000, 256);\n PBKDF2Function strategy4 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 256);\n PBKDF2Function strategy5 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 123);\n\n\n\nThis chunk of code is part of a test suite for the Password4j library, which provides password hashing and verification functionality. 
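A minimal round-trip matching the PBKDF2 fixtures above; the hash string is copied verbatim from the chunks, and the failure behaviour mirrors `testPBKDF2WrongCheck2` and `testPBKDF2BadCheck`.

    import com.password4j.CompressedPBKDF2Function;
    import com.password4j.HashingFunction;

    class Pbkdf2CheckSketch {
        static boolean verify() {
            // The compressed format encodes algorithm, iterations and length in the string itself.
            String hashed = "$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=";
            HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);
            // Strings without the expected '$' separators raise BadParametersException instead.
            return strategy.check("password", hashed);  // true for this fixture
        }
    }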
The tests in this chunk focus on the PBKDF2 (Password-Based Key Derivation Function 2) hashing algorithm, including checking the behavior of the `CompressedPBKDF2Function` and testing the equality of different `PBKDF2Function` instances.", "metadata": {"chunk_id": "doc_48_chunk_9", "original_index": 9, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_9"}, "type": "Document"} +{"page_content": "\n // WHEN\n Map map = new HashMap<>();\n map.put(strategy1, strategy1.toString());\n map.put(strategy2, strategy2.toString());\n map.put(strategy3, strategy3.toString());\n map.put(strategy4, strategy4.toString());\n map.put(strategy5, strategy5.toString());\n\n\n // THEN\n assertEquals(4, map.size());\n assertEquals(strategy1, strategy2);\n }\n\n @Test\n public void testCompressed()\n {\n Hmac algorithm = Hmac.SHA512;\n\n\n for (int i = 1; i <= 100; i++)\n {\n String password = PepperGenerator.generate(12);\n String salt = PepperGenerator.generate(i);\n Hash hash = CompressedPBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n Hash notCompressedHash = PBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n\n\nThe chunk is part of a test suite for the Password4j library, specifically testing the PBKDF2 hashing function and its compressed variant.", "metadata": {"chunk_id": "doc_48_chunk_10", "original_index": 10, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_10"}, "type": "Document"} +{"page_content": " String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n\n\nThe chunk is part of a test suite for the Password4j library, specifically testing the functionality of the CompressedPBKDF2Function and MessageDigestFunction classes.", "metadata": {"chunk_id": "doc_48_chunk_11", "original_index": 11, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_11"}, "type": "Document"} +{"page_content": " // THEN\n boolean eqNull = function.equals(null);\n boolean eqClass = function.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = function.equals(MessageDigestFunction.getInstance(a, o));\n boolean sameInst2 = function.equals(new MessageDigestFunction(a, o));\n String toString = function.toString();\n int hashCode = function.hashCode();\n boolean notSameInst1 = function.equals(new MessageDigestFunction(\"SHA1\", o));\n boolean notSameInst2 = function.equals(new MessageDigestFunction(a, SaltOption.PREPEND));\n\n\n\nThe provided chunk is part of a test suite for the `MessageDigestFunction` class, which is testing the equality and hashCode methods of the class.", "metadata": {"chunk_id": "doc_48_chunk_12", "original_index": 12, "pid": 
"d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_12"}, "type": "Document"} +{"page_content": "\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new MessageDigestFunction(\"SHA1\", o).toString());\n Assert.assertNotEquals(hashCode, new MessageDigestFunction(a, SaltOption.PREPEND).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n }\n\n}\n\n\nThe chunk is part of a test suite for the `MessageDigestFunction` class, which is responsible for hashing passwords using various message digest algorithms. The specific test case is checking the equality and hashCode methods of the `MessageDigestFunction` class.", "metadata": {"chunk_id": "doc_48_chunk_13", "original_index": 13, "pid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_13"}, "type": "Document"} +{"page_content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = \"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n\n\nThis chunk of code is part of a test suite for the Password4j library, which is a Java library for password hashing and verification. The test suite covers various issues that have been reported and fixed in the library, and this particular chunk is related to issue #92, which involves testing the Argon2 password hashing function.", "metadata": {"chunk_id": "doc_49_chunk_0", "original_index": 0, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_0"}, "type": "Document"} +{"page_content": " boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9ΫI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n\n\nThis chunk of code is part of the `IssuesTest` class, which is testing the functionality of the `Password4j` library. Specifically, this chunk is testing the resolution of issue #92, where it verifies the password and generates a new hash with a custom salt.", "metadata": {"chunk_id": "doc_49_chunk_1", "original_index": 1, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_1"}, "type": "Document"} +{"page_content": "\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n\n\nThis chunk of code is part of a test suite for the Password4j library, specifically addressing issue #99 related to the Argon2 hashing function. 
The test demonstrates how to create an Argon2 instance with specific parameters and generate a hash using the provided password and salt.", "metadata": {"chunk_id": "doc_49_chunk_2", "original_index": 2, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_2"}, "type": "Document"} +{"page_content": " (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n\n\nThis chunk of code demonstrates how to use the Argon2 function to hash a password with a specific set of parameters, and then compares the resulting hash to an expected value.", "metadata": {"chunk_id": "doc_49_chunk_3", "original_index": 3, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_3"}, "type": "Document"} +{"page_content": " /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n\n\nThe provided chunk is a test case that addresses issue #93 in the Password4j library. It verifies the correct behavior of the Argon2 password hashing function when checking a password against a pre-computed hash.", "metadata": {"chunk_id": "doc_49_chunk_4", "original_index": 4, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_4"}, "type": "Document"} +{"page_content": "\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n\n\nThis chunk of code is part of a test suite for the Password4j library, specifically addressing issue #120 on the project's GitHub repository. The test checks the behavior of the Password.hash() method when an empty provider is added to the Java Security framework.", "metadata": {"chunk_id": "doc_49_chunk_5", "original_index": 5, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_5"}, "type": "Document"} +{"page_content": " // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"’(っ^▿^)۶\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A٩(˘◡˘ ) ❌❌ ❌❌❌\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n\n\nThis chunk of code is part of a test suite for the Password4j library, which is a Java library for password hashing and verification. 
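A sketch of the verification path exercised by the issue92/issue93 chunks; the hash literal and both call styles are copied from the chunk contents.

    import com.password4j.Argon2Function;
    import com.password4j.Password;

    class Argon2CheckSketch {
        static boolean verify() {
            String hash = "$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY";
            // getInstanceFromHash parses memory, iterations, parallelism and version from the string.
            Argon2Function function = Argon2Function.getInstanceFromHash(hash);
            return Password.check("Test123!", hash).with(function)
                    && function.check("Test123!", hash);  // equivalent direct check
        }
    }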
The chunk specifically addresses issue #120 and issue #126 from the library's GitHub repository.", "metadata": {"chunk_id": "doc_49_chunk_6", "original_index": 6, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_6"}, "type": "Document"} +{"page_content": " Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"ŸŁĀPRČ\")\n .addSalt(\"ŸŁĀPRČAA\")\n .withArgon2()\n .getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n\n\nThis chunk of code is part of the `issue126()` test method in the `IssuesTest` class. It tests the hashing of passwords with special characters using the Scrypt and Argon2 algorithms.", "metadata": {"chunk_id": "doc_49_chunk_7", "original_index": 7, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_7"}, "type": "Document"} +{"page_content": " private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n\n\nThe provided chunk is a private utility method named `printBytesToString` that takes a byte array as input and returns a hexadecimal string representation of the bytes. This method is used within the `IssuesTest` class to format the byte arrays returned by the password hashing functions for comparison purposes.", "metadata": {"chunk_id": "doc_49_chunk_8", "original_index": 8, "pid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_8"}, "type": "Document"} +{"page_content": "/*\n * (C) Copyright 2021 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j.types;\n\n\n\nThe chunk represents the license and package information for the Argon2 enum class in the Password4j library.", "metadata": {"chunk_id": "doc_50_chunk_0", "original_index": 0, "pid": "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e_0"}, "type": "Document"} +{"page_content": "/**\n * Enum containing the different variations of Argon2.\n *\n * @author David Bertoldi\n * @see Argon2\n * @since 1.5.0\n */\npublic enum Argon2\n{\n /**\n * It maximizes resistance to GPU cracking attacks.\n * It accesses the memory array in a password dependent order, which reduces the possibility of time–memory trade-off (TMTO) attacks,\n * but introduces possible side-channel attacks\n */\n D,\n\n /**\n * It is optimized to resist side-channel attacks. It accesses the memory array in a password independent order.\n */\n I,\n\n /**\n * It is a hybrid version. 
It follows the Argon2i approach for the first half pass over memory and the Argon2d approach for subsequent passes.\n * It is recommended to use Argon2id except when there are reasons to prefer one of the other two modes.\n */\n ID;\n\n}\n\n\nThe chunk describes an enum named `Argon2` that contains different variations of the Argon2 password hashing algorithm, with details on the characteristics and use cases of each variation.", "metadata": {"chunk_id": "doc_50_chunk_1", "original_index": 1, "pid": "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e_1"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/plugins/wasi_crypto/symmetric/tag.h - Symmetric Tag class ===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the Symmetric Tag definition.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n\n\nThis chunk contains the header file for the Symmetric Tag class, which is part of the WasmEdge Project's WASI Crypto plugin. The class provides an implementation of an authentication tag that can be verified without using channels.", "metadata": {"chunk_id": "doc_51_chunk_0", "original_index": 0, "pid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298_0"}, "type": "Document"} +{"page_content": "#include \"utils/error.h\"\n#include \"utils/secret_vec.h\"\n\n#include \"common/span.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace Symmetric {\n\n/// Authentication tag, that can be verified without channels using the provided\n/// APIs. Very small and no streaming.\n///\n/// More detail:\n/// https://github.com/WebAssembly/wasi-crypto/blob/main/docs/wasi-crypto.md#authentication-tags\nclass Tag {\npublic:\n Tag(Tag &&Data) noexcept = default;\n Tag &operator=(Tag &&Data) noexcept = default;\n Tag(const Tag &Data) noexcept = delete;\n Tag &operator=(const Tag &Data) noexcept = delete;\n\n\n\nThis chunk defines the `Tag` class within the `WasmEdge::Host::WasiCrypto::Symmetric` namespace, which represents an authentication tag that can be verified without using channels. It includes the necessary header files and defines the public methods and member variables of the `Tag` class.", "metadata": {"chunk_id": "doc_51_chunk_1", "original_index": 1, "pid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298_1"}, "type": "Document"} +{"page_content": " Tag(SecretVec &&Data) noexcept : Data(std::move(Data)) {}\n\n size_t len() const noexcept { return Data.size(); }\n\n /// The function MUST return `__WASI_CRYPTO_ERRNO_INVALID_TAG` if the\n /// tags don't match.\n WasiCryptoExpect verify(Span RawTag) const noexcept;\n\n WasiCryptoExpect pull(Span Raw) const noexcept;\n\nprivate:\n SecretVec Data;\n};\n\n} // namespace Symmetric\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n\n\nThis chunk defines the implementation of the Symmetric Tag class within the WasmEdge project's WasiCrypto namespace. 
It includes the constructor, length accessor, tag verification, and data retrieval methods, as well as the private data member.", "metadata": {"chunk_id": "doc_51_chunk_2", "original_index": 2, "pid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298_2"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"loader/serialize.h\"\n\n#include \n#include \n#include \n\nnamespace {\n\nWasmEdge::Configure Conf;\nWasmEdge::Loader::Serializer Ser(Conf);\n\nWasmEdge::AST::CodeSection\ncreateCodeSec(std::vector Instructions) {\n WasmEdge::AST::CodeSection CodeSec;\n WasmEdge::AST::CodeSegment CodeSeg;\n WasmEdge::AST::Expression Expr;\n Expr.getInstrs() = Instructions;\n CodeSeg.getExpr() = Expr;\n CodeSec.getContent().push_back(CodeSeg);\n return CodeSec;\n}\n\n\n\nThis chunk contains the setup code for the test suite, including the creation of a WasmEdge configuration and serializer, as well as a helper function to create a code section from a vector of instructions.", "metadata": {"chunk_id": "doc_52_chunk_0", "original_index": 0, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_0"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeBlockControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 1. Test block control instructions.\n //\n // 1. Serialize block with only end operation.\n // 2. Serialize loop with only end operation.\n // 3. Serialize block with instructions.\n // 4. Serialize loop with instructions.\n\n WasmEdge::AST::Instruction Block(WasmEdge::OpCode::Block);\n WasmEdge::AST::Instruction Loop(WasmEdge::OpCode::Loop);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n\n\nThis chunk tests the serialization of block control instructions, including blocks and loops with and without instructions, within the overall document that tests the serialization of various WebAssembly instructions.", "metadata": {"chunk_id": "doc_52_chunk_1", "original_index": 1, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_1"}, "type": "Document"} +{"page_content": " Block.setEmptyBlockType();\n Instructions = {Block, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThe chunk represents a test case for serializing a block control instruction with only an end operation in the WasmEdge Loader Serializer.", "metadata": {"chunk_id": "doc_52_chunk_2", "original_index": 2, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_2"}, "type": "Document"} +{"page_content": " Loop.setEmptyBlockType();\n Instructions = {Loop, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 
0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThe chunk represents the serialization of a loop control instruction with an empty block type, which is part of the test suite for serializing WebAssembly instructions.", "metadata": {"chunk_id": "doc_52_chunk_3", "original_index": 3, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_3"}, "type": "Document"} +{"page_content": " Loop.setEmptyBlockType();\n Instructions = {Block, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThis chunk demonstrates the serialization of a block control instruction with instructions inside the block, including i32.eqz, i32.eq, and i32.ne instructions.", "metadata": {"chunk_id": "doc_52_chunk_4", "original_index": 4, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_4"}, "type": "Document"} +{"page_content": " Loop.setEmptyBlockType();\n Instructions = {Loop, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n\n\nThis chunk demonstrates the serialization of a loop control instruction with instructions inside the loop block.", "metadata": {"chunk_id": "doc_52_chunk_5", "original_index": 5, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_5"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeIfElseControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 2. Test serialize if-else control instruction.\n //\n // 1. Serialize if statement with only end operation.\n // 2. Serialize if and else statements with only end operation.\n // 3. Serialize if statement with instructions.\n // 4. 
Serialize if and else statements with instructions.\n\n\n\nThe chunk tests the serialization of if-else control instructions in the WasmEdge Loader Serializer.", "metadata": {"chunk_id": "doc_52_chunk_6", "original_index": 6, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_6"}, "type": "Document"} +{"page_content": " WasmEdge::AST::Instruction If(WasmEdge::OpCode::If);\n WasmEdge::AST::Instruction Else(WasmEdge::OpCode::Else);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n If.setEmptyBlockType();\n Instructions = {If, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThe provided chunk demonstrates the serialization of an if-statement control instruction with only an end operation. It is part of a larger test suite that verifies the serialization of various WebAssembly instructions.", "metadata": {"chunk_id": "doc_52_chunk_7", "original_index": 7, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_7"}, "type": "Document"} +{"page_content": " If.setEmptyBlockType();\n Instructions = {If, Else, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x05U, // OpCode Else\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThis chunk demonstrates the serialization of an if-else control instruction in the WasmEdge Loader Serializer.", "metadata": {"chunk_id": "doc_52_chunk_8", "original_index": 8, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_8"}, "type": "Document"} +{"page_content": " If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThis chunk demonstrates the serialization of an if control instruction with instructions inside the if block, as part of the SerializeIfElseControlInstruction test case.", "metadata": {"chunk_id": "doc_52_chunk_9", "original_index": 9, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_9"}, "type": "Document"} +{"page_content": " If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, Else,\n I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0EU, // Content size = 14\n 0x01U, // Vector length = 1\n 0x0CU, // Code segment size = 12\n 0x00U, // 
Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x05U, // OpCode Else\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in else statement.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n\n\nThis chunk demonstrates the serialization of an if-else control instruction with instructions in both the if and else statements.", "metadata": {"chunk_id": "doc_52_chunk_10", "original_index": 10, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_10"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeBrControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 3. Test branch control instructions.\n //\n // 1. Serialize valid label index.\n\n WasmEdge::AST::Instruction Br(WasmEdge::OpCode::Br);\n WasmEdge::AST::Instruction BrIf(WasmEdge::OpCode::Br_if);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n\n\nThe chunk tests the serialization of branch control instructions, specifically the `Br` and `Br_if` instructions, within the overall document that tests the serialization of various WebAssembly instructions.", "metadata": {"chunk_id": "doc_52_chunk_11", "original_index": 11, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_11"}, "type": "Document"} +{"page_content": " Br.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {Br, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x0CU, // OpCode Br.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThe chunk is part of a test case that serializes the branch control instruction `Br` with a valid label index.", "metadata": {"chunk_id": "doc_52_chunk_12", "original_index": 12, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_12"}, "type": "Document"} +{"page_content": " BrIf.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrIf, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected[5] = 0x0DU; // OpCode Br_if.\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeBrTableControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 4. Test branch table control instruction.\n //\n // 1. Serialize instruction with empty label vector.\n // 2. 
Serialize instruction with label vector.\n\n WasmEdge::AST::Instruction BrTable(WasmEdge::OpCode::Br_table);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n\n\nThis chunk tests the serialization of branch control instructions, specifically the `Br_if` and `Br_table` instructions, within the overall document that tests the serialization of various WebAssembly instructions.", "metadata": {"chunk_id": "doc_52_chunk_13", "original_index": 13, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_13"}, "type": "Document"} +{"page_content": " BrTable.setLabelListSize(1);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x0EU, // OpCode Br_table.\n 0x00U, // Vector length = 0\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThis chunk demonstrates the serialization of a branch table control instruction with an empty label vector.", "metadata": {"chunk_id": "doc_52_chunk_14", "original_index": 14, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_14"}, "type": "Document"} +{"page_content": " BrTable.setLabelListSize(4);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFF1U;\n BrTable.getLabelList()[1].TargetIndex = 0xFFFFFFF2U;\n BrTable.getLabelList()[2].TargetIndex = 0xFFFFFFF3U;\n BrTable.getLabelList()[3].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n\n\nThis chunk is part of a test case for serializing the Br_table control instruction in the WasmEdge Loader Serializer. It demonstrates serializing an instruction with a label vector containing multiple target indices.", "metadata": {"chunk_id": "doc_52_chunk_15", "original_index": 15, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_15"}, "type": "Document"} +{"page_content": " 0x1AU, // Content size = 26\n 0x01U, // Vector length = 1\n 0x18U, // Code segment size = 24\n 0x00U, // Local vec(0)\n 0x0EU, // OpCode Br_table.\n 0x03U, // Vector length = 3\n 0xF1U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[0]\n 0xF2U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[1]\n 0xF3U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[2]\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n\n\nThis chunk represents the serialization of a Br_table control instruction with a label vector of size 3 and a default label index, as part of a larger test suite for serializing WebAssembly instructions.", "metadata": {"chunk_id": "doc_52_chunk_16", "original_index": 16, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_16"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeCallControlInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 5. Test call control instructions.\n //\n // 1. Serialize call instruction with valid type index.\n // 2. 
Serialize call_indirect instruction with valid type and table index.\n // 3. Serialize call_indirect instruction with invalid table index without\n // Ref-Types proposal.\n\n WasmEdge::AST::Instruction Call(WasmEdge::OpCode::Call);\n WasmEdge::AST::Instruction CallIndirect(WasmEdge::OpCode::Call_indirect);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n\n\nThis chunk tests the serialization of call control instructions, including the serialization of call instructions with valid type indices and call_indirect instructions with valid type and table indices. It also tests the serialization of call_indirect instructions with invalid table indices without the Reference Types proposal.", "metadata": {"chunk_id": "doc_52_chunk_17", "original_index": 17, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_17"}, "type": "Document"} +{"page_content": " Call.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {Call, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x10U, // OpCode Call.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Function type index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThe chunk is part of a test suite for serializing WebAssembly instructions, specifically testing the serialization of the `Call` control instruction with a valid function type index.", "metadata": {"chunk_id": "doc_52_chunk_18", "original_index": 18, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_18"}, "type": "Document"} +{"page_content": " CallIndirect.getTargetIndex() = 0xFFFFFFFFU;\n CallIndirect.getSourceIndex() = 0x05U;\n Instructions = {CallIndirect, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x11U, // OpCode Call_indirect.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Type index.\n 0x05U, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThe chunk is part of a test case that serializes the call_indirect control instruction, which is used to call an indirect function. The test case checks that the serializer correctly handles the call_indirect instruction with a valid type index and table index.", "metadata": {"chunk_id": "doc_52_chunk_19", "original_index": 19, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_19"}, "type": "Document"} +{"page_content": " EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeReferenceInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 6. Test reference instructions.\n //\n // 1. Serialize function reference type.\n // 2. 
Serialize invalid reference type without Ref-Types proposal.\n\n WasmEdge::AST::Instruction RefNull(WasmEdge::OpCode::Ref__null);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n\n\nThis chunk tests the serialization of reference instructions, including handling invalid reference types when the Reference Types proposal is not enabled.", "metadata": {"chunk_id": "doc_52_chunk_20", "original_index": 20, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_20"}, "type": "Document"} +{"page_content": " RefNull.setValType(WasmEdge::TypeCode::FuncRef);\n Instructions = {RefNull, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0xD0U, // OpCode Ref__null.\n 0x70U, // FuncRef\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThe chunk is part of a test suite that verifies the serialization of various WebAssembly instructions, specifically testing the serialization of the `Ref__null` instruction with a `FuncRef` value type.", "metadata": {"chunk_id": "doc_52_chunk_21", "original_index": 21, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_21"}, "type": "Document"} +{"page_content": " RefNull.setValType(WasmEdge::TypeCode::ExternRef);\n Instructions = {RefNull, End};\n EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeParametricInstruction) {\n WasmEdge::Configure ConfNoSIMD;\n ConfNoSIMD.removeProposal(WasmEdge::Proposal::SIMD);\n WasmEdge::Loader::Serializer SerNoSIMD(ConfNoSIMD);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 7. Test parametric instructions.\n //\n // 1. Serialize valid select_t instruction with value type list.\n // 2. Serialize invalid value type list without SIMD proposal.\n\n WasmEdge::AST::Instruction SelectT(WasmEdge::OpCode::Select_t);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n\n\nThis chunk tests the serialization of reference instructions and parametric instructions in the WasmEdge Loader Serializer. 
It checks the behavior when the Reference Types proposal is not enabled, and when the SIMD proposal is not enabled.", "metadata": {"chunk_id": "doc_52_chunk_22", "original_index": 22, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_22"}, "type": "Document"} +{"page_content": " SelectT.setValTypeListSize(2);\n SelectT.getValTypeList()[0] = WasmEdge::TypeCode::I32;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::I64;\n Instructions = {SelectT, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x1CU, // OpCode Select_t.\n 0x02U, // Vector length = 2\n 0x7FU, 0x7EU, // Value types\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThis chunk demonstrates the serialization of a valid `select_t` instruction with a value type list of `i32` and `i64`.", "metadata": {"chunk_id": "doc_52_chunk_23", "original_index": 23, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_23"}, "type": "Document"} +{"page_content": " SelectT.getValTypeList()[0] = WasmEdge::TypeCode::V128;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::V128;\n Instructions = {SelectT, End};\n EXPECT_FALSE(SerNoSIMD.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeVariableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 8. Test variable instructions.\n //\n // 1. Serialize valid local or global index.\n\n WasmEdge::AST::Instruction LocalGet(WasmEdge::OpCode::Local__get);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n\n\nThe chunk is part of a test suite for serializing WebAssembly instructions, specifically testing the serialization of parametric instructions and variable instructions.", "metadata": {"chunk_id": "doc_52_chunk_24", "original_index": 24, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_24"}, "type": "Document"} +{"page_content": " LocalGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {LocalGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x20U, // OpCode Local__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Local index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n\n\nThis chunk demonstrates the serialization of a local get instruction, which is part of the variable instructions section of the WebAssembly instruction set.", "metadata": {"chunk_id": "doc_52_chunk_25", "original_index": 25, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_25"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeTableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 9. Test table instructions.\n //\n // 1. Serialize table_get instruction.\n // 2. 
Serialize table_init instruction.\n\n WasmEdge::AST::Instruction TableGet(WasmEdge::OpCode::Table__get);\n WasmEdge::AST::Instruction TableInit(WasmEdge::OpCode::Table__init);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n\n\nThis chunk tests the serialization of table instructions, including the serialization of the `table_get` and `table_init` instructions, within the overall document that tests the serialization of various WebAssembly instructions.", "metadata": {"chunk_id": "doc_52_chunk_26", "original_index": 26, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_26"}, "type": "Document"} +{"page_content": " TableGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x25U, // OpCode Table__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThis chunk demonstrates the serialization of the `table_get` instruction, which is part of the table instructions section of the WebAssembly instruction set.", "metadata": {"chunk_id": "doc_52_chunk_27", "original_index": 27, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_27"}, "type": "Document"} +{"page_content": " TableInit.getSourceIndex() = 0x05U;\n TableInit.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableInit, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0CU, // Content size = 12\n 0x01U, // Vector length = 1\n 0x0AU, // Code segment size = 10\n 0x00U, // Local vec(0)\n 0xFCU, 0x0CU, // OpCode Table__init.\n 0x05U, // Element idx.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n\n\nThis chunk demonstrates the serialization of the `Table__init` instruction, which is part of the table instructions section of the WebAssembly instruction set.", "metadata": {"chunk_id": "doc_52_chunk_28", "original_index": 28, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_28"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeMemoryInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 10. Test memory instructions.\n //\n // 1. Serialize memory_grow instruction.\n // 2. 
Serialize i32_load instruction.\n\n WasmEdge::AST::Instruction MemoryGrow(WasmEdge::OpCode::Memory__grow);\n WasmEdge::AST::Instruction I32Load(WasmEdge::OpCode::I32__load);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n Instructions = {MemoryGrow, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0x40U, // OpCode Memory__grow.\n 0x00U, // Checking byte\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThis chunk tests the serialization of memory instructions, specifically the memory_grow and i32_load instructions, within the overall document that tests the serialization of various WebAssembly instructions.", "metadata": {"chunk_id": "doc_52_chunk_29", "original_index": 29, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_29"}, "type": "Document"} +{"page_content": " I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThis chunk demonstrates the serialization of the i32_load instruction, which is a memory instruction in the WebAssembly instruction set. It shows how the serializer sets the memory alignment and offset for the instruction and then serializes the instruction into the expected byte sequence.", "metadata": {"chunk_id": "doc_52_chunk_30", "original_index": 30, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_30"}, "type": "Document"} +{"page_content": " I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n\n\nThis chunk demonstrates the serialization of the i32_load instruction, which is a memory instruction in the WebAssembly instruction set. It shows how the serializer handles the alignment and offset parameters for this instruction.", "metadata": {"chunk_id": "doc_52_chunk_31", "original_index": 31, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_31"}, "type": "Document"} +{"page_content": "TEST(SerializeInstructionTest, SerializeConstInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 11. Test const numeric instructions.\n //\n // 1. Serialize I32 const numeric instruction.\n // 2. Serialize I64 const numeric instruction.\n // 3. Serialize F32 const numeric instruction.\n // 4. 
Serialize F64 const numeric instruction.\n\n WasmEdge::AST::Instruction I32Const(WasmEdge::OpCode::I32__const);\n WasmEdge::AST::Instruction I64Const(WasmEdge::OpCode::I64__const);\n WasmEdge::AST::Instruction F32Const(WasmEdge::OpCode::F32__const);\n WasmEdge::AST::Instruction F64Const(WasmEdge::OpCode::F64__const);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n\n\nThis chunk tests the serialization of various constant numeric instructions, including I32, I64, F32, and F64 constants, within the overall document that tests the serialization of various WebAssembly instructions.", "metadata": {"chunk_id": "doc_52_chunk_32", "original_index": 32, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_32"}, "type": "Document"} +{"page_content": " I32Const.setNum(-123456);\n Instructions = {I32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x41U, // OpCode I32__const.\n 0xC0U, 0xBBU, 0x78U, // I32 -123456.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThis chunk demonstrates the serialization of an I32 constant numeric instruction in the context of a test suite for the serialization of various WebAssembly instructions.", "metadata": {"chunk_id": "doc_52_chunk_33", "original_index": 33, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_33"}, "type": "Document"} +{"page_content": " I64Const.setNum(static_cast(-112233445566L));\n Instructions = {I64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x42U, // OpCode I64__const.\n 0xC2U, 0x8EU, 0xF6U, 0xF2U, 0xDDU, 0x7CU, // I64 -112233445566\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThis chunk demonstrates the serialization of an I64 constant instruction within the overall document, which tests the serialization of various WebAssembly instructions.", "metadata": {"chunk_id": "doc_52_chunk_34", "original_index": 34, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_34"}, "type": "Document"} +{"page_content": " F32Const.setNum(static_cast(-0x1.921fb4p+1)); // -3.1415926F\n Instructions = {F32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x09U, // Content size = 9\n 0x01U, // Vector length = 1\n 0x07U, // Code segment size = 7\n 0x00U, // Local vec(0)\n 0x43U, // OpCode F32__const.\n 0xDAU, 0x0FU, 0x49U, 0xC0U, // F32 -3.1415926\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n\n\nThis chunk demonstrates the serialization of an F32 constant instruction with the value -3.1415926F.", "metadata": {"chunk_id": "doc_52_chunk_35", "original_index": 35, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_35"}, "type": "Document"} +{"page_content": " F64Const.setNum(-3.1415926535897932);\n Instructions = {F64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0DU, // Content size = 13\n 0x01U, // Vector length = 1\n 0x0BU, // Code segment size = 11\n 0x00U, // Local vec(0)\n 0x44U, // 
OpCode F64__const.\n 0x18U, 0x2DU, 0x44U, 0x54U,\n 0xFBU, 0x21U, 0x09U, 0xC0U, // F64 -3.1415926535897932\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n} // namespace\n\n\nThis chunk is part of a test suite that verifies the serialization of various WebAssembly instructions, specifically the serialization of the F64_const instruction.", "metadata": {"chunk_id": "doc_52_chunk_36", "original_index": 36, "pid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_36"}, "type": "Document"} +{"page_content": "#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiLoggingMock {\n\nusing namespace std::literals;\n\nclass Log : public Runtime::HostFunction<Log> {\npublic:\n Expect<void> body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"wasi-logging\"sv);\n return Unexpect(ErrCode::Value::HostFuncError);\n }\n};\n\n} // namespace WasiLoggingMock\n} // namespace Host\n} // namespace WasmEdge\n\nThis chunk appears to be a C++ header file that defines a class called `Log` within the `WasmEdge::Host::WasiLoggingMock` namespace. The class is a host function that logs a message and returns an error code.", "metadata": {"chunk_id": "doc_53_chunk_0", "original_index": 0, "pid": "adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823_0"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"common/defines.h\"\n#if WASMEDGE_OS_MACOS\n\n#include \"common/errcode.h\"\n#include \"host/wasi/environ.h\"\n#include \"macos.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WASI {\n\nWasiExpect<void> Environ::procRaise(__wasi_signal_t Signal) const noexcept {\n int SysSignal;\n switch (Signal) {\n case __WASI_SIGNAL_NONE:\n SysSignal = 0;\n break;\n case __WASI_SIGNAL_HUP:\n SysSignal = SIGHUP;\n break;\n case __WASI_SIGNAL_INT:\n SysSignal = SIGINT;\n break;\n case __WASI_SIGNAL_QUIT:\n SysSignal = SIGQUIT;\n break;\n case __WASI_SIGNAL_ILL:\n SysSignal = SIGILL;\n break;\n case __WASI_SIGNAL_TRAP:\n SysSignal = SIGTRAP;\n break;\n case __WASI_SIGNAL_ABRT:\n SysSignal = SIGABRT;\n break;\n case __WASI_SIGNAL_BUS:\n SysSignal = SIGBUS;\n break;\n case __WASI_SIGNAL_FPE:\n SysSignal = SIGFPE;\n break;\n\n\nThis chunk of code is part of the WasmEdge project, specifically the WASI (WebAssembly System Interface) implementation for the macOS operating system. 
It defines a function called `procRaise` that maps WASI signal types to their corresponding system signals on macOS.", "metadata": {"chunk_id": "doc_54_chunk_0", "original_index": 0, "pid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84_0"}, "type": "Document"} +{"page_content": " case __WASI_SIGNAL_KILL:\n SysSignal = SIGKILL;\n break;\n case __WASI_SIGNAL_USR1:\n SysSignal = SIGUSR1;\n break;\n case __WASI_SIGNAL_SEGV:\n SysSignal = SIGSEGV;\n break;\n case __WASI_SIGNAL_USR2:\n SysSignal = SIGUSR2;\n break;\n case __WASI_SIGNAL_PIPE:\n SysSignal = SIGPIPE;\n break;\n case __WASI_SIGNAL_ALRM:\n SysSignal = SIGALRM;\n break;\n case __WASI_SIGNAL_TERM:\n SysSignal = SIGTERM;\n break;\n case __WASI_SIGNAL_CHLD:\n SysSignal = SIGCHLD;\n break;\n case __WASI_SIGNAL_CONT:\n SysSignal = SIGCONT;\n break;\n case __WASI_SIGNAL_STOP:\n SysSignal = SIGSTOP;\n break;\n case __WASI_SIGNAL_TSTP:\n SysSignal = SIGTSTP;\n break;\n case __WASI_SIGNAL_TTIN:\n SysSignal = SIGTTIN;\n break;\n case __WASI_SIGNAL_TTOU:\n SysSignal = SIGTTOU;\n break;\n case __WASI_SIGNAL_URG:\n SysSignal = SIGURG;\n break;\n case __WASI_SIGNAL_XCPU:\n SysSignal = SIGXCPU;\n break;\n\n\nThis chunk of code is part of the `procRaise` function in the `Environ` namespace of the `WASI` module in the WasmEdge project. It maps various WASI signal constants to their corresponding system signal values for the macOS platform.", "metadata": {"chunk_id": "doc_54_chunk_1", "original_index": 1, "pid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84_1"}, "type": "Document"} +{"page_content": " case __WASI_SIGNAL_XFSZ:\n SysSignal = SIGXFSZ;\n break;\n case __WASI_SIGNAL_VTALRM:\n SysSignal = SIGVTALRM;\n break;\n case __WASI_SIGNAL_PROF:\n SysSignal = SIGPROF;\n break;\n case __WASI_SIGNAL_WINCH:\n SysSignal = SIGWINCH;\n break;\n case __WASI_SIGNAL_SYS:\n SysSignal = SIGSYS;\n break;\n case __WASI_SIGNAL_POLL:\n case __WASI_SIGNAL_PWR:\n default:\n return WasiUnexpect(__WASI_ERRNO_NOTSUP);\n }\n if (auto Res = std::raise(SysSignal); Res != 0) {\n return WasiUnexpect(fromErrNo(errno));\n }\n return {};\n}\n\nWasiExpect Environ::schedYield() const noexcept {\n ::sched_yield();\n return {};\n}\n\n} // namespace WASI\n} // namespace Host\n} // namespace WasmEdge\n\n#endif\n\n\nThis chunk of code is part of the implementation of the WASI (WebAssembly System Interface) Environ class in the WasmEdge runtime, specifically for the macOS platform. 
It handles the mapping between WASI signal types and their corresponding system signals, as well as the implementation of the `schedYield()` function.", "metadata": {"chunk_id": "doc_54_chunk_2", "original_index": 2, "pid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84_2"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCryptoMock {\n\nusing namespace std::literals;\nstatic inline constexpr const uint32_t kWASICryptoError = 1U;\n\nnamespace Common {\nclass ArrayOutputLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass ArrayOutputPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThis chunk contains the definitions of the `ArrayOutputLen` and `ArrayOutputPull` host functions within the `WasmEdge::Host::WasiCryptoMock` namespace, which are part of the WASI Crypto API implementation.", "metadata": {"chunk_id": "doc_55_chunk_0", "original_index": 0, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_0"}, "type": "Document"} +{"page_content": "class OptionsOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk contains the implementation of the `OptionsOpen` and `OptionsClose` host functions, which are part of the WASI-Crypto API provided by the WasmEdge runtime.", "metadata": {"chunk_id": "doc_55_chunk_1", "original_index": 1, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_1"}, "type": "Document"} +{"page_content": "class OptionsSet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk contains two classes, `OptionsSet` and `OptionsSetU64`, which are part of the `WasiCryptoMock` namespace within the `WasmEdge` namespace. 
These classes appear to be related to the WASI Crypto API and are likely used for setting options in the WASI Crypto API.", "metadata": {"chunk_id": "doc_55_chunk_2", "original_index": 2, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_2"}, "type": "Document"} +{"page_content": "class OptionsSetGuestBuffer\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk contains the implementation of several host functions related to the WASI-Crypto API, specifically the OptionsSetGuestBuffer, SecretsManagerOpen, and SecretsManagerClose functions. These functions are part of the WasmEdge::Host::WasiCryptoMock namespace, which provides a mock implementation of the WASI-Crypto API for testing purposes.", "metadata": {"chunk_id": "doc_55_chunk_3", "original_index": 3, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_3"}, "type": "Document"} +{"page_content": "class SecretsManagerInvalidate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Common\n\nnamespace AsymmetricCommon {\nclass KeypairGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThis chunk contains the implementation of the `SecretsManagerInvalidate` host function from the `Common` namespace, as well as the implementation of the `KeypairGenerate` and `KeypairImport` host functions from the `AsymmetricCommon` namespace, all of which are part of the WASI Crypto API.", "metadata": {"chunk_id": "doc_55_chunk_4", "original_index": 4, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_4"}, "type": "Document"} +{"page_content": "class KeypairGenerateManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the WasmEdge::Host::WasiCryptoMock namespace, which contains various host functions related to asymmetric cryptography operations in the WASI-Crypto API.", "metadata": {"chunk_id": "doc_55_chunk_5", "original_index": 5, "pid": 
"9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_5"}, "type": "Document"} +{"page_content": "class KeypairReplaceManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk contains two classes, `KeypairReplaceManaged` and `KeypairId`, which are part of the `AsymmetricCommon` namespace within the `WasmEdge::Host::WasiCryptoMock` namespace. These classes appear to be related to the management and identification of asymmetric key pairs in the WASI-Crypto mock implementation.", "metadata": {"chunk_id": "doc_55_chunk_6", "original_index": 6, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_6"}, "type": "Document"} +{"page_content": "class KeypairFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairFromPkAndSk : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the WasmEdge::Host::WasiCryptoMock namespace, which contains various host functions related to asymmetric cryptography operations in the WASI-Crypto API.", "metadata": {"chunk_id": "doc_55_chunk_7", "original_index": 7, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_7"}, "type": "Document"} +{"page_content": "class KeypairExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairPublickey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk contains two host functions, `KeypairExport` and `KeypairPublickey`, which are part of the `AsymmetricCommon` namespace within the `WasmEdge::Host::WasiCryptoMock` namespace. 
These functions are likely related to asymmetric cryptography operations.", "metadata": {"chunk_id": "doc_55_chunk_8", "original_index": 8, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_8"}, "type": "Document"} +{"page_content": "class KeypairSecretkey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThis chunk contains the implementation of several host functions related to asymmetric cryptography operations in the WASI-Crypto API, including functions for managing keypairs, public keys, and secret keys.", "metadata": {"chunk_id": "doc_55_chunk_9", "original_index": 9, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_9"}, "type": "Document"} +{"page_content": "class PublickeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk contains two host functions, `PublickeyExport` and `PublickeyVerify`, which are part of the `AsymmetricCommon` namespace within the `WasmEdge::Host::WasiCryptoMock` namespace. These functions are likely related to the handling and verification of public keys in the WASI-Crypto API.", "metadata": {"chunk_id": "doc_55_chunk_10", "original_index": 10, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_10"}, "type": "Document"} +{"page_content": "class PublickeyFromSecretkey\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the `AsymmetricCommon` namespace within the `WasmEdge::Host::WasiCryptoMock` namespace. 
It contains two host functions related to public key operations: `PublickeyFromSecretkey` and `PublickeyClose`.", "metadata": {"chunk_id": "doc_55_chunk_11", "original_index": 11, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_11"}, "type": "Document"} +{"page_content": "class SecretkeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the WasmEdge::Host::WasiCryptoMock namespace, which contains various host functions related to asymmetric cryptography operations.", "metadata": {"chunk_id": "doc_55_chunk_12", "original_index": 12, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_12"}, "type": "Document"} +{"page_content": "class SecretkeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace AsymmetricCommon\n\nnamespace Kx {\nclass Dh : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk contains two host function classes: `SecretkeyClose` and `Dh`. These classes are part of the `AsymmetricCommon` and `Kx` namespaces, respectively, within the `WasmEdge::Host::WasiCryptoMock` namespace.", "metadata": {"chunk_id": "doc_55_chunk_13", "original_index": 13, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_13"}, "type": "Document"} +{"page_content": "class Encapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Decapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Kx\n\n\n\nThe provided chunk is part of the \"Kx\" namespace within the \"WasmEdge::Host::WasiCryptoMock\" namespace. It contains two host functions, \"Encapsulate\" and \"Decapsulate\", which are likely related to key exchange functionality in the WASI-Crypto API.", "metadata": {"chunk_id": "doc_55_chunk_14", "original_index": 14, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_14"}, "type": "Document"} +{"page_content": "namespace Signatures {\nclass Export : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Import : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the \"Signatures\" namespace within the \"WasiCryptoMock\" namespace, which is part of the \"Host\" namespace within the \"WasmEdge\" namespace. 
This chunk contains the implementation of the \"Export\" and \"Import\" host functions for the WASI Crypto API.", "metadata": {"chunk_id": "doc_55_chunk_15", "original_index": 15, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_15"}, "type": "Document"} +{"page_content": "class StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateUpdate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk contains the implementation of the `StateOpen` and `StateUpdate` host functions, which are part of the WASI-Crypto API provided by the WasmEdge runtime.", "metadata": {"chunk_id": "doc_55_chunk_16", "original_index": 16, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_16"}, "type": "Document"} +{"page_content": "class StateSign : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the \"Signatures\" namespace within the \"WasiCryptoMock\" namespace, which is a part of the \"Host\" namespace in the \"WasmEdge\" namespace. The chunk contains two host functions: \"StateSign\" and \"StateClose\", which are likely related to signing and closing signature states in the WASI Crypto API.", "metadata": {"chunk_id": "doc_55_chunk_17", "original_index": 17, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_17"}, "type": "Document"} +{"page_content": "class VerificationStateOpen\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateUpdate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the \"Signatures\" namespace within the \"WasiCryptoMock\" namespace, which is part of the \"Host\" namespace in the \"WasmEdge\" namespace. 
The chunk contains the implementation of the \"VerificationStateOpen\" and \"VerificationStateUpdate\" host functions.", "metadata": {"chunk_id": "doc_55_chunk_18", "original_index": 18, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_18"}, "type": "Document"} +{"page_content": "class VerificationStateVerify\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateClose\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Close : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n} // namespace Signatures\n\n\n\nThe provided chunk is part of the \"Signatures\" namespace within the \"WasiCryptoMock\" namespace, which is part of the \"Host\" namespace within the \"WasmEdge\" namespace. The chunk contains the implementation of various host functions related to signature verification and closing of signature-related resources.", "metadata": {"chunk_id": "doc_55_chunk_19", "original_index": 19, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_19"}, "type": "Document"} +{"page_content": "namespace Symmetric {\nclass KeyGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the \"Symmetric\" namespace within the \"WasiCryptoMock\" namespace, which is part of the \"Host\" namespace in the \"WasmEdge\" namespace. This chunk contains the implementation of the \"KeyGenerate\" and \"KeyImport\" host functions for the WASI Crypto API.", "metadata": {"chunk_id": "doc_55_chunk_20", "original_index": 20, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_20"}, "type": "Document"} +{"page_content": "class KeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyGenerateManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the \"Symmetric\" namespace within the \"WasiCryptoMock\" namespace, which is a part of the \"Host\" namespace in the \"WasmEdge\" namespace. 
The chunk contains the implementation of various host functions related to symmetric key operations.", "metadata": {"chunk_id": "doc_55_chunk_21", "original_index": 21, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_21"}, "type": "Document"} +{"page_content": "class KeyStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyReplaceManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk contains two host function classes, `KeyStoreManaged` and `KeyReplaceManaged`, which are part of the `Symmetric` namespace within the `WasiCryptoMock` namespace of the `WasmEdge` library. These functions are likely related to the management and replacement of symmetric cryptographic keys.", "metadata": {"chunk_id": "doc_55_chunk_22", "original_index": 22, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_22"}, "type": "Document"} +{"page_content": "class KeyId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk contains two host functions, `KeyId` and `KeyFromId`, which are part of the `Symmetric` namespace within the `WasiCryptoMock` namespace of the `WasmEdge` library. 
These functions are likely related to the management and retrieval of symmetric cryptographic keys.", "metadata": {"chunk_id": "doc_55_chunk_23", "original_index": 23, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_23"}, "type": "Document"} +{"page_content": "class StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClone : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the Symmetric namespace within the WasmEdge::Host::WasiCryptoMock namespace, which contains various host functions related to symmetric cryptography operations.", "metadata": {"chunk_id": "doc_55_chunk_24", "original_index": 24, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_24"}, "type": "Document"} +{"page_content": "class StateOptionsGet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOptionsGetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the \"Symmetric\" namespace within the \"WasiCryptoMock\" namespace, which is a part of the \"Host\" namespace in the \"WasmEdge\" namespace. The chunk contains two host functions related to getting options for a symmetric cryptographic state.", "metadata": {"chunk_id": "doc_55_chunk_25", "original_index": 25, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_25"}, "type": "Document"} +{"page_content": "class StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateAbsorb : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the Symmetric namespace within the WasiCryptoMock namespace, which contains various host functions related to symmetric cryptography operations.", "metadata": {"chunk_id": "doc_55_chunk_26", "original_index": 26, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_26"}, "type": "Document"} +{"page_content": "class StateSqueeze : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueezeTag : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the \"Symmetric\" namespace within the \"WasiCryptoMock\" namespace, which is a part of the \"Host\" namespace in the \"WasmEdge\" namespace. 
The chunk contains two host function classes, \"StateSqueeze\" and \"StateSqueezeTag\", which are likely related to symmetric cryptographic operations.", "metadata": {"chunk_id": "doc_55_chunk_27", "original_index": 27, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_27"}, "type": "Document"} +{"page_content": "class StateSqueezeKey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateMaxTagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the Symmetric namespace within the WasmEdge::Host::WasiCryptoMock namespace, which contains various host functions related to symmetric cryptography operations.", "metadata": {"chunk_id": "doc_55_chunk_28", "original_index": 28, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_28"}, "type": "Document"} +{"page_content": "class StateEncrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateEncryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the Symmetric namespace within the WasmEdge::Host::WasiCryptoMock namespace, which contains various host functions related to symmetric cryptography operations.", "metadata": {"chunk_id": "doc_55_chunk_29", "original_index": 29, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_29"}, "type": "Document"} +{"page_content": "class StateDecrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateDecryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk is part of the \"Symmetric\" namespace within the \"WasiCryptoMock\" namespace, which is a part of the \"Host\" namespace in the \"WasmEdge\" namespace. 
The chunk contains the implementation of the \"StateDecrypt\" and \"StateDecryptDetached\" host functions, which are likely related to symmetric cryptographic operations.", "metadata": {"chunk_id": "doc_55_chunk_30", "original_index": 30, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_30"}, "type": "Document"} +{"page_content": "class StateRatchet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n\n\nThe provided chunk contains two host function classes, `StateRatchet` and `TagLen`, which are part of the `Symmetric` namespace within the `WasiCryptoMock` namespace of the `WasmEdge` library. These functions are likely related to symmetric cryptographic operations.", "metadata": {"chunk_id": "doc_55_chunk_31", "original_index": 31, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_31"}, "type": "Document"} +{"page_content": "class TagPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Symmetric\n\n} // namespace WasiCryptoMock\n} // namespace Host\n} // namespace WasmEdge\n\n\nThis chunk defines three host functions related to symmetric cryptography in the WASI-Crypto mock implementation within the WasmEdge namespace.", "metadata": {"chunk_id": "doc_55_chunk_32", "original_index": 32, "pid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_32"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/ast/module.h - Module class definition -------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the declaration of the Module node class, which is the\n/// module node in AST.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"ast/section.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace AST {\n\n/// AST Module node.\nclass Module {\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n\n\nThis chunk defines the `Module` class, which is the module node in the Abstract Syntax Tree (AST) of the WasmEdge project. 
It includes the declaration of the `Module` class and its member functions, such as getters and setters for the module's magic number, version, and various sections.", "metadata": {"chunk_id": "doc_56_chunk_0", "original_index": 0, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_0"}, "type": "Document"} +{"page_content": " /// Getters of references to sections.\n Span getCustomSections() const noexcept {\n return CustomSecs;\n }\n std::vector &getCustomSections() noexcept {\n return CustomSecs;\n }\n const TypeSection &getTypeSection() const { return TypeSec; }\n TypeSection &getTypeSection() { return TypeSec; }\n const ImportSection &getImportSection() const { return ImportSec; }\n ImportSection &getImportSection() { return ImportSec; }\n const FunctionSection &getFunctionSection() const { return FunctionSec; }\n\n\nThe provided chunk is part of the `Module` class definition within the `WasmEdge::AST` namespace. It contains the getter methods for various sections of the WebAssembly module, such as custom sections, type section, import section, and function section.", "metadata": {"chunk_id": "doc_56_chunk_1", "original_index": 1, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_1"}, "type": "Document"} +{"page_content": " FunctionSection &getFunctionSection() { return FunctionSec; }\n const TableSection &getTableSection() const { return TableSec; }\n TableSection &getTableSection() { return TableSec; }\n const MemorySection &getMemorySection() const { return MemorySec; }\n MemorySection &getMemorySection() { return MemorySec; }\n const GlobalSection &getGlobalSection() const { return GlobalSec; }\n GlobalSection &getGlobalSection() { return GlobalSec; }\n const ExportSection &getExportSection() const { return ExportSec; }\n ExportSection &getExportSection() { return ExportSec; }\n const StartSection &getStartSection() const { return StartSec; }\n StartSection &getStartSection() { return StartSec; }\n const ElementSection &getElementSection() const { return ElementSec; }\n ElementSection &getElementSection() { return ElementSec; }\n const CodeSection &getCodeSection() const { return CodeSec; }\n\n\nThe chunk represents the getter and setter methods for various sections of a WebAssembly module, such as the Function, Table, Memory, Global, Export, Start, Element, and Code sections.", "metadata": {"chunk_id": "doc_56_chunk_2", "original_index": 2, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_2"}, "type": "Document"} +{"page_content": " CodeSection &getCodeSection() { return CodeSec; }\n const DataSection &getDataSection() const { return DataSec; }\n DataSection &getDataSection() { return DataSec; }\n const DataCountSection &getDataCountSection() const { return DataCountSec; }\n DataCountSection &getDataCountSection() { return DataCountSec; }\n const AOTSection &getAOTSection() const { return AOTSec; }\n AOTSection &getAOTSection() { return AOTSec; }\n\n /// Getter and setter of compiled symbol.\n const auto &getSymbol() const noexcept { return IntrSymbol; }\n void setSymbol(Symbol S) noexcept {\n IntrSymbol = std::move(S);\n }\n\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; }\n void setIsValidated(bool V = true) noexcept { IsValidated = V; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n /// @}\n\n\n\nThe provided chunk contains the getter and setter methods for various sections of the Module class, 
including the CodeSection, DataSection, DataCountSection, and AOTSection. It also includes the getter and setter methods for the compiled symbol and the validated flag of the Module class.", "metadata": {"chunk_id": "doc_56_chunk_3", "original_index": 3, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_3"}, "type": "Document"} +{"page_content": " /// \\name Section nodes of Module node.\n /// @{\n std::vector CustomSecs;\n TypeSection TypeSec;\n ImportSection ImportSec;\n FunctionSection FunctionSec;\n TableSection TableSec;\n MemorySection MemorySec;\n GlobalSection GlobalSec;\n ExportSection ExportSec;\n StartSection StartSec;\n ElementSection ElementSec;\n CodeSection CodeSec;\n DataSection DataSec;\n DataCountSection DataCountSec;\n /// @}\n\n /// \\name Data of AOT.\n /// @{\n AOTSection AOTSec;\n Symbol IntrSymbol;\n /// @}\n\n /// \\name Validated flag.\n /// @{\n bool IsValidated = false;\n /// @}\n};\n\nclass CoreModuleSection : public Section {\npublic:\n /// Getter of content.\n const Module &getContent() const noexcept { return Content; }\n Module &getContent() noexcept { return Content; }\n\nprivate:\n Module Content;\n};\n\nnamespace Component {\n\n\n\nThe provided chunk defines the section nodes, AOT data, and validated flag of the Module class, as well as the CoreModuleSection class, which is a part of the WasmEdge AST namespace. This information is crucial for understanding the structure and functionality of the WebAssembly module representation in the WasmEdge project.", "metadata": {"chunk_id": "doc_56_chunk_4", "original_index": 4, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_4"}, "type": "Document"} +{"page_content": "class Component {\n using Section =\n std::variant;\n\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n\n\nThe provided chunk defines a `Component` class within the `WasmEdge::AST::Component` namespace. This class represents a component in the WebAssembly ecosystem and contains methods to access the magic, version, and layer vectors of the component.", "metadata": {"chunk_id": "doc_56_chunk_5", "original_index": 5, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_5"}, "type": "Document"} +{"page_content": " /// Getter of layer vector.\n const std::vector &getLayer() const noexcept { return Layer; }\n std::vector &getLayer() noexcept { return Layer; }\n\n std::vector
&getSections() noexcept { return Secs; }\n Span getSections() const noexcept { return Secs; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n std::vector Layer;\n\n std::vector
Secs;\n /// @}\n};\n\n} // namespace Component\n\n} // namespace AST\n} // namespace WasmEdge\n\n\nThis chunk defines the `Component` class within the `WasmEdge::AST` namespace, which includes methods to access the magic, version, and layer vectors, as well as the sections of the component.", "metadata": {"chunk_id": "doc_56_chunk_6", "original_index": 6, "pid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_6"}, "type": "Document"} +{"page_content": "#pragma once\n#include \"avutil_base.h\"\n\n#include \"runtime/callingframe.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVUtil {\n\nclass AVLogSetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t LogLevelId);\n};\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVLogSetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t FlagsId);\n};\n\n\n\nThis chunk contains the implementation of several classes related to logging functionality in the WasmEdgeFFmpeg namespace of the WasmEdge project. These classes provide methods for setting and getting the log level, as well as setting and getting the log flags.", "metadata": {"chunk_id": "doc_57_chunk_0", "original_index": 0, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_0"}, "type": "Document"} +{"page_content": "class AVLogGetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n// Option funcs.\nclass AVOptSetBin : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetBin(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSet : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSet(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n\n\nThis chunk contains classes related to AVUtil functionality in the WasmEdge FFmpeg namespace, including classes for setting and getting log levels and flags, as well as various option-related functions.", "metadata": {"chunk_id": "doc_57_chunk_1", "original_index": 1, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_1"}, "type": "Document"} +{"page_content": "class AVOptSetDouble : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetDouble(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetImageSize : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetImageSize(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n\n\nThis chunk contains classes related to setting 
various options in the WasmEdgeFFmpeg library, including setting double values, setting rational values, and setting image size.", "metadata": {"chunk_id": "doc_57_chunk_2", "original_index": 2, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_2"}, "type": "Document"} +{"page_content": "class AVOptSetPixelFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetPixelFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetSampleFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetSampleFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n\n\nThe provided chunk contains two classes, `AVOptSetPixelFmt` and `AVOptSetSampleFmt`, which are part of the `WasmEdge::Host::WasmEdgeFFmpeg::AVUtil` namespace. These classes likely provide functionality related to setting pixel and sample formats in the context of the WasmEdge FFmpeg environment.", "metadata": {"chunk_id": "doc_57_chunk_3", "original_index": 3, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_3"}, "type": "Document"} +{"page_content": "class AVOptSetChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVRescaleQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, int32_t CDen);\n};\n\nclass AVRescaleQRnd : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQRnd(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &, int64_t A, int32_t BNum,\n int32_t BDen, int32_t CNum, int32_t CDen,\n int32_t RoundingId);\n};\n\n\n\nThe provided chunk contains classes related to AVUtil functionality in the WasmEdge FFmpeg namespace, including classes for setting channel layout, rescaling, and rounding operations.", "metadata": {"chunk_id": "doc_57_chunk_4", "original_index": 4, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_4"}, "type": "Document"} +{"page_content": "class AVUtilVersion : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilVersion(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &);\n};\n\nclass AVGetChannelLayoutNbChannels\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNbChannels(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutNameLen\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNameLen(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n\n\nThe provided chunk contains classes related to AVUtil functionality in the WasmEdge FFmpeg namespace, including classes for getting the AVUtil version, channel layout information, and related utility functions.", "metadata": {"chunk_id": "doc_57_chunk_5", "original_index": 5, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_5"}, "type": "Document"} +{"page_content": "class AVGetChannelLayoutName\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutName(std::shared_ptr HostEnv)\n 
: WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId, uint32_t NamePtr,\n uint32_t NameLen);\n};\n\nclass AVGetChannelLayoutMask\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutMask(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n\n\nThe provided chunk contains two classes, `AVGetChannelLayoutName` and `AVGetChannelLayoutMask`, which are part of the `WasmEdge::Host::WasmEdgeFFmpeg::AVUtil` namespace. These classes likely provide functionality related to channel layout management and manipulation within the WasmEdge FFmpeg environment.", "metadata": {"chunk_id": "doc_57_chunk_6", "original_index": 6, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_6"}, "type": "Document"} +{"page_content": "class AVGetDefaultChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetDefaultChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n int32_t ChannelLayoutId);\n};\n\nclass AVUtilConfigurationLength\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfigurationLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n\n\nThe provided chunk contains two classes, `AVGetDefaultChannelLayout` and `AVUtilConfigurationLength`, which are part of the `WasmEdge::Host::WasmEdgeFFmpeg::AVUtil` namespace. These classes appear to be related to FFmpeg utility functions within the WasmEdge runtime environment.", "metadata": {"chunk_id": "doc_57_chunk_7", "original_index": 7, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_7"}, "type": "Document"} +{"page_content": "class AVUtilConfiguration : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfiguration(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t ConfigPtr,\n uint32_t ConfigLen);\n};\n\nclass AVUtilLicenseLength : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicenseLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilLicense : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicense(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t LicensePtr,\n uint32_t LicenseLen);\n};\n\n} // namespace AVUtil\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n\n\nThe provided chunk contains the implementation of three classes: `AVUtilConfiguration`, `AVUtilLicenseLength`, and `AVUtilLicense`, which are part of the `WasmEdge::Host::WasmEdgeFFmpeg::AVUtil` namespace. 
These classes likely provide utility functions related to the configuration and licensing of the FFmpeg library within the WasmEdge runtime environment.", "metadata": {"chunk_id": "doc_57_chunk_8", "original_index": 8, "pid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_8"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n\n\nThis chunk appears to be the header file for a parser component in the WasmEdge project, which is responsible for handling argument errors.", "metadata": {"chunk_id": "doc_58_chunk_0", "original_index": 0, "pid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_0"}, "type": "Document"} +{"page_content": " Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n\n\nThe provided chunk defines the implementation of the `Error` class within the `WasmEdge::PO` namespace, which is used to handle argument errors in the WasmEdge project.", "metadata": {"chunk_id": "doc_58_chunk_1", "original_index": 1, "pid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_1"}, "type": "Document"} +{"page_content": "#include \"avChapter.h\"\n\nextern \"C\" {\n#include \"libavformat/avformat.h\"\n}\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVFormat {\n\nExpect AVChapterId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId, uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->id;\n}\n\nExpect AVChapterSetId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t ChapterId) {\n\n\n\nThis chunk of code is part of the implementation of the AVFormat namespace within the WasmEdgeFFmpeg module of the WasmEdge Host namespace. 
It contains functions for interacting with the AVChapter data structure from the FFmpeg library, including getting and setting the chapter ID, timebase, start and end times, and metadata.", "metadata": {"chunk_id": "doc_59_chunk_0", "original_index": 0, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_0"}, "type": "Document"} +{"page_content": " FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->id = ChapterId;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterTimebase::body(const Runtime::CallingFrame &Frame,\n uint32_t NumPtr, uint32_t DenPtr,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(Num, MemInst, int32_t, NumPtr, \"\");\n MEM_PTR_CHECK(Den, MemInst, int32_t, DenPtr, \"\");\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n\n\nThis chunk of code is part of the `AVChapterSetId` and `AVChapterTimebase` functions within the `AVFormat` namespace of the `WasmEdgeFFmpeg` module in the `WasmEdge` namespace. These functions are responsible for setting the ID and timebase of a specific chapter within an `AVFormatContext` object.", "metadata": {"chunk_id": "doc_59_chunk_1", "original_index": 1, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_1"}, "type": "Document"} +{"page_content": " AVRational const AvRational = static_cast(*AvChapter)->time_base;\n *Num = AvRational.num;\n *Den = AvRational.den;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetTimebase::body(const Runtime::CallingFrame &,\n int32_t Num, int32_t Den,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVRational const Timebase = av_make_q(Num, Den);\n\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->time_base = Timebase;\n return static_cast(ErrNo::Success);\n}\n\n\n\nThe provided chunk is part of the implementation of the AVChapterTimebase and AVChapterSetTimebase functions within the WasmEdge::Host::WasmEdgeFFmpeg::AVFormat namespace. These functions are responsible for retrieving and setting the time base of a specific chapter within an AVFormatContext.", "metadata": {"chunk_id": "doc_59_chunk_2", "original_index": 2, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_2"}, "type": "Document"} +{"page_content": "Expect AVChapterStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->start;\n}\n\n\n\nThe provided chunk is a part of the `AVChapterStart` function within the `AVFormat` namespace of the `WasmEdgeFFmpeg` module in the `WasmEdge` namespace. 
This function retrieves the start time of a specific chapter in an `AVFormatContext` object.", "metadata": {"chunk_id": "doc_59_chunk_3", "original_index": 3, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_3"}, "type": "Document"} +{"page_content": "Expect AVChapterSetStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n int64_t StartValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->start = StartValue;\n return static_cast(ErrNo::Success);\n}\n\n\n\nThe provided chunk is a function definition within the `WasmEdge::Host::WasmEdgeFFmpeg::AVFormat` namespace that sets the start time of a chapter in an `AVFormatContext` object.", "metadata": {"chunk_id": "doc_59_chunk_4", "original_index": 4, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_4"}, "type": "Document"} +{"page_content": "Expect AVChapterEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->end;\n}\n\n\n\nThe provided chunk is a part of the `AVChapterEnd` function implementation within the `WasmEdge::Host::WasmEdgeFFmpeg::AVFormat` namespace. This function retrieves the end timestamp of a specific chapter in an `AVFormatContext` object.", "metadata": {"chunk_id": "doc_59_chunk_5", "original_index": 5, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_5"}, "type": "Document"} +{"page_content": "Expect AVChapterSetEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t EndValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->end = EndValue;\n return static_cast(ErrNo::Success);\n}\n\n\n\nThe provided chunk is a function definition for the `AVChapterSetEnd` function, which is part of the `AVFormat` namespace within the `WasmEdgeFFmpeg` namespace of the `WasmEdge` host environment. This function sets the end value of a specific chapter in an `AVFormatContext` object.", "metadata": {"chunk_id": "doc_59_chunk_6", "original_index": 6, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_6"}, "type": "Document"} +{"page_content": "Expect AVChapterMetadata::body(const Runtime::CallingFrame &Frame,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, uint32_t DictPtr) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(DictId, MemInst, uint32_t, DictPtr,\n \"Failed when accessing the return AVDictionary memory\"sv);\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n\n AVDictionary **AvDictionary =\n static_cast(av_malloc(sizeof(AVDictionary *)));\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n\n\nThe provided chunk is part of the `AVChapterMetadata` function within the `WasmEdge::Host::WasmEdgeFFmpeg::AVFormat` namespace. 
This function is responsible for retrieving the metadata associated with a specific chapter in an `AVFormatContext`.", "metadata": {"chunk_id": "doc_59_chunk_7", "original_index": 7, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_7"}, "type": "Document"} +{"page_content": " // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n *AvDictionary = (*AvChapter)->metadata;\n FFMPEG_PTR_STORE(AvDictionary, DictId);\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetMetadata::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n uint32_t DictId) {\n\n\n\nThe provided chunk is part of the implementation of the `AVChapterMetadata` and `AVChapterSetMetadata` functions within the `WasmEdge::Host::WasmEdgeFFmpeg::AVFormat` namespace. These functions are responsible for retrieving and setting the metadata associated with a specific chapter in an `AVFormatContext` object.", "metadata": {"chunk_id": "doc_59_chunk_8", "original_index": 8, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_8"}, "type": "Document"} +{"page_content": " FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n FFMPEG_PTR_FETCH(AvDictionary, DictId, AVDictionary *);\n\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n if (AvDictionary == nullptr)\n (*AvChapter)->metadata = nullptr;\n else\n (*AvChapter)->metadata = *AvDictionary;\n return static_cast(ErrNo::Success);\n}\n\n} // namespace AVFormat\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n\n\nThe provided chunk is part of the implementation of the `AVChapterSetMetadata` function within the `AVFormat` namespace of the `WasmEdgeFFmpeg` module in the `WasmEdge` namespace. 
This function is responsible for setting the metadata of a specific chapter in an `AVFormatContext` object.", "metadata": {"chunk_id": "doc_59_chunk_9", "original_index": 9, "pid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_9"}, "type": "Document"} +{"page_content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"asymmetric_common/publickey.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace AsymmetricCommon {\n\nWasiCryptoExpect\nimportPk(AsymmetricCommon::Algorithm Alg, Span Encoded,\n __wasi_publickey_encoding_e_t Encoding) noexcept {\n return std::visit(\n [=](auto Factory) noexcept -> WasiCryptoExpect {\n return decltype(Factory)::PublicKey::import(Encoded, Encoding);\n },\n Alg);\n}\n\n\n\nThis chunk of code defines a function `importPk` within the `WasmEdge::Host::WasiCrypto::AsymmetricCommon` namespace, which is responsible for importing a public key from an encoded representation.", "metadata": {"chunk_id": "doc_60_chunk_0", "original_index": 0, "pid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26_0"}, "type": "Document"} +{"page_content": "WasiCryptoExpect>\npkExportData(const PkVariant &PkVariant,\n __wasi_publickey_encoding_e_t Encoding) noexcept {\n return std::visit(\n [Encoding](const auto &Pk) noexcept { return Pk.exportData(Encoding); },\n PkVariant);\n}\n\nWasiCryptoExpect pkVerify(const PkVariant &PkVariant) noexcept {\n return std::visit([](const auto &Pk) noexcept { return Pk.verify(); },\n PkVariant);\n}\n\n} // namespace AsymmetricCommon\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n\n\nThe provided chunk contains two functions, `pkExportData` and `pkVerify`, which are part of the `AsymmetricCommon` namespace within the `WasiCrypto` namespace of the `WasmEdge` project. These functions are responsible for exporting public key data and verifying public keys, respectively.", "metadata": {"chunk_id": "doc_60_chunk_1", "original_index": 1, "pid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26_1"}, "type": "Document"} +{"page_content": "use std::ffi::OsStr;\nuse std::io::{self, Error, ErrorKind, Result};\nuse std::iter::once;\nuse std::os::windows::ffi::OsStrExt;\nuse std::sync::mpsc::TryRecvError;\nuse std::sync::Arc;\n\nuse crate::event::{OnResize, WindowSize};\nuse crate::tty::windows::child::ChildExitWatcher;\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options, Shell};\n\nmod blocking;\nmod child;\nmod conpty;\n\nuse blocking::{UnblockedReader, UnblockedWriter};\nuse conpty::Conpty as Backend;\nuse miow::pipe::{AnonRead, AnonWrite};\nuse polling::{Event, Poller};\n\npub const PTY_CHILD_EVENT_TOKEN: usize = 1;\npub const PTY_READ_WRITE_TOKEN: usize = 2;\n\ntype ReadPipe = UnblockedReader;\ntype WritePipe = UnblockedWriter;\n\n\n\nThis chunk contains the core definitions and imports for the Pty struct and related types in a Windows-specific terminal emulator implementation.", "metadata": {"chunk_id": "doc_61_chunk_0", "original_index": 0, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_0"}, "type": "Document"} +{"page_content": "pub struct Pty {\n // XXX: Backend is required to be the first field, to ensure correct drop order. 
Dropping\n // `conout` before `backend` will cause a deadlock (with Conpty).\n backend: Backend,\n conout: ReadPipe,\n conin: WritePipe,\n child_watcher: ChildExitWatcher,\n}\n\npub fn new(config: &Options, window_size: WindowSize, _window_id: u64) -> Result {\n conpty::new(config, window_size)\n .ok_or_else(|| Error::new(ErrorKind::Other, \"failed to spawn conpty\"))\n}\n\n\n\nThe chunk defines the `Pty` struct and a function to create a new `Pty` instance, which are part of the implementation of a PTY (Pseudo-Terminal) in a Windows-based terminal emulator.", "metadata": {"chunk_id": "doc_61_chunk_1", "original_index": 1, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_1"}, "type": "Document"} +{"page_content": "impl Pty {\n fn new(\n backend: impl Into,\n conout: impl Into,\n conin: impl Into,\n child_watcher: ChildExitWatcher,\n ) -> Self {\n Self { backend: backend.into(), conout: conout.into(), conin: conin.into(), child_watcher }\n }\n\n pub fn child_watcher(&self) -> &ChildExitWatcher {\n &self.child_watcher\n }\n}\n\nfn with_key(mut event: Event, key: usize) -> Event {\n event.key = key;\n event\n}\n\nimpl EventedReadWrite for Pty {\n type Reader = ReadPipe;\n type Writer = WritePipe;\n\n\n\nThe provided chunk defines the implementation of the `Pty` struct, which is responsible for managing the child process and its input/output channels. It includes the `new` constructor, a method to access the child watcher, and the implementation of the `EventedReadWrite` trait.", "metadata": {"chunk_id": "doc_61_chunk_2", "original_index": 2, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_2"}, "type": "Document"} +{"page_content": " #[inline]\n unsafe fn register(\n &mut self,\n poll: &Arc,\n interest: polling::Event,\n poll_opts: polling::PollMode,\n ) -> io::Result<()> {\n self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n\n\nThe provided chunk is part of the `EventedReadWrite` trait implementation for the `Pty` struct, which is responsible for registering the `Pty` instance with a `Poller` to enable event-driven I/O operations.", "metadata": {"chunk_id": "doc_61_chunk_3", "original_index": 3, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_3"}, "type": "Document"} +{"page_content": " Ok(())\n }\n\n #[inline]\n fn reregister(\n &mut self,\n poll: &Arc,\n interest: polling::Event,\n poll_opts: polling::PollMode,\n ) -> io::Result<()> {\n self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n\n\nThe provided chunk is part of the `EventedReadWrite` trait implementation for the `Pty` struct, which is responsible for registering and re-registering the `Pty` instance with a `Poller` for event monitoring.", "metadata": {"chunk_id": "doc_61_chunk_4", "original_index": 4, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_4"}, "type": "Document"} +{"page_content": " Ok(())\n }\n\n #[inline]\n fn deregister(&mut self, _poll: &Arc) -> io::Result<()> {\n self.conin.deregister();\n self.conout.deregister();\n self.child_watcher.deregister();\n\n Ok(())\n }\n\n #[inline]\n fn reader(&mut self) -> &mut Self::Reader {\n &mut 
self.conout\n }\n\n #[inline]\n fn writer(&mut self) -> &mut Self::Writer {\n &mut self.conin\n }\n}\n\n\n\nThe provided chunk is part of the implementation of the `EventedReadWrite` trait for the `Pty` struct, which is responsible for registering, re-registering, and deregistering the `Pty` instance with a `Poller`, as well as providing access to the reader and writer components of the `Pty` instance.", "metadata": {"chunk_id": "doc_61_chunk_5", "original_index": 5, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_5"}, "type": "Document"} +{"page_content": "impl EventedPty for Pty {\n fn next_child_event(&mut self) -> Option {\n match self.child_watcher.event_rx().try_recv() {\n Ok(ev) => Some(ev),\n Err(TryRecvError::Empty) => None,\n Err(TryRecvError::Disconnected) => Some(ChildEvent::Exited(None)),\n }\n }\n}\n\nimpl OnResize for Pty {\n fn on_resize(&mut self, window_size: WindowSize) {\n self.backend.on_resize(window_size)\n }\n}\n\nfn cmdline(config: &Options) -> String {\n let default_shell = Shell::new(\"powershell\".to_owned(), Vec::new());\n let shell = config.shell.as_ref().unwrap_or(&default_shell);\n\n\n\nThe provided chunk is an implementation of the `EventedPty` and `OnResize` traits for the `Pty` struct, as well as a function to construct the command line for the shell based on the provided configuration.", "metadata": {"chunk_id": "doc_61_chunk_6", "original_index": 6, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_6"}, "type": "Document"} +{"page_content": " once(shell.program.as_str())\n .chain(shell.args.iter().map(|s| s.as_str()))\n .collect::>()\n .join(\" \")\n}\n\n/// Converts the string slice into a Windows-standard representation for \"W\"-\n/// suffixed function variants, which accept UTF-16 encoded string values.\npub fn win32_string + ?Sized>(value: &S) -> Vec {\n OsStr::new(value).encode_wide().chain(once(0)).collect()\n}\n\n\nThe provided chunk is a part of the `cmdline` function, which constructs the command line for the default shell based on the configuration options. The `win32_string` function is a utility function that converts a string slice into a Windows-standard representation for \"W\"-suffixed function variants, which accept UTF-16 encoded string values.", "metadata": {"chunk_id": "doc_61_chunk_7", "original_index": 7, "pid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_7"}, "type": "Document"} +{"page_content": "//! Serde helpers.\n\nuse toml::{Table, Value};\n\n/// Merge two serde structures.\n///\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n/// `replacement`.\npub fn merge(base: Value, replacement: Value) -> Value {\n match (base, replacement) {\n (Value::Array(mut base), Value::Array(mut replacement)) => {\n base.append(&mut replacement);\n Value::Array(base)\n },\n (Value::Table(base), Value::Table(replacement)) => {\n Value::Table(merge_tables(base, replacement))\n },\n (_, value) => value,\n }\n}\n\n\n\nThis chunk provides a function to merge two Serde structures, handling arrays and tables. 
It is part of a set of helper functions for working with Serde, a popular serialization library in Rust.", "metadata": {"chunk_id": "doc_62_chunk_0", "original_index": 0, "pid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_0"}, "type": "Document"} +{"page_content": "/// Merge two key/value tables.\nfn merge_tables(mut base: Table, replacement: Table) -> Table {\n for (key, value) in replacement {\n let value = match base.remove(&key) {\n Some(base_value) => merge(base_value, value),\n None => value,\n };\n base.insert(key, value);\n }\n\n base\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn merge_primitive() {\n let base = Value::Table(Table::new());\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n\n\nThe provided chunk is a part of a Rust module that contains helper functions for merging Serde structures. The `merge_tables` function is responsible for merging two key-value tables, and the `merge_primitive` test function is used to test the behavior of the `merge` function when dealing with primitive values.", "metadata": {"chunk_id": "doc_62_chunk_1", "original_index": 1, "pid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_1"}, "type": "Document"} +{"page_content": " let base = Value::Boolean(false);\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Integer(0.into());\n let replacement = Value::Integer(1.into());\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::String(String::new());\n let replacement = Value::String(String::from(\"test\"));\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Table(Table::new());\n let replacement = Value::Table(Table::new());\n assert_eq!(merge(base.clone(), replacement), base);\n }\n\n\n\nThis chunk tests the `merge` function's behavior when merging primitive values such as booleans, integers, strings, and empty tables.", "metadata": {"chunk_id": "doc_62_chunk_2", "original_index": 2, "pid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_2"}, "type": "Document"} +{"page_content": " #[test]\n fn merge_sequence() {\n let base = Value::Array(vec![Value::Table(Table::new())]);\n let replacement = Value::Array(vec![Value::Boolean(true)]);\n let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);\n assert_eq!(merge(base, replacement), expected);\n }\n\n #[test]\n fn merge_tables() {\n let mut base_table = Table::new();\n base_table.insert(String::from(\"a\"), Value::Boolean(true));\n base_table.insert(String::from(\"b\"), Value::Boolean(false));\n let base = Value::Table(base_table);\n\n let mut replacement_table = Table::new();\n replacement_table.insert(String::from(\"a\"), Value::Boolean(true));\n replacement_table.insert(String::from(\"c\"), Value::Boolean(false));\n let replacement = Value::Table(replacement_table);\n\n let merged = merge(base, replacement);\n\n\n\nThe provided chunk contains two test cases for the `merge` function, which is responsible for merging two Serde structures. 
The first test case checks the behavior of merging two arrays, while the second test case checks the behavior of merging two tables.", "metadata": {"chunk_id": "doc_62_chunk_3", "original_index": 3, "pid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_3"}, "type": "Document"} +{"page_content": " let mut expected_table = Table::new();\n expected_table.insert(String::from(\"b\"), Value::Boolean(false));\n expected_table.insert(String::from(\"a\"), Value::Boolean(true));\n expected_table.insert(String::from(\"c\"), Value::Boolean(false));\n let expected = Value::Table(expected_table);\n\n assert_eq!(merged, expected);\n }\n}\n\n\nThis chunk is part of a test case that verifies the behavior of the `merge` function, which is responsible for merging two Serde structures. The test case checks that the `merge` function correctly handles the merging of two tables, where the replacement table overrides values in the base table, and also adds new key-value pairs.", "metadata": {"chunk_id": "doc_62_chunk_4", "original_index": 4, "pid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_4"}, "type": "Document"} +{"page_content": "//! TTY related functionality.\n\nuse std::ffi::CStr;\nuse std::fs::File;\nuse std::io::{Error, ErrorKind, Read, Result};\nuse std::mem::MaybeUninit;\nuse std::os::unix::io::{AsRawFd, FromRawFd};\nuse std::os::unix::net::UnixStream;\nuse std::os::unix::process::CommandExt;\nuse std::process::{Child, Command, Stdio};\nuse std::sync::Arc;\nuse std::{env, ptr};\n\nuse libc::{c_int, TIOCSCTTY};\nuse log::error;\nuse polling::{Event, PollMode, Poller};\nuse rustix_openpty::openpty;\nuse rustix_openpty::rustix::termios::Winsize;\n#[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\nuse rustix_openpty::rustix::termios::{self, InputModes, OptionalActions};\nuse signal_hook::consts as sigconsts;\nuse signal_hook::low_level::pipe as signal_pipe;\n\n\n\nThis chunk contains the TTY-related functionality, including the use of the `std`, `libc`, `log`, `polling`, `rustix_openpty`, and `signal_hook` crates to handle the creation and management of a PTY (Pseudo-Terminal) and its associated child process.", "metadata": {"chunk_id": "doc_63_chunk_0", "original_index": 0, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_0"}, "type": "Document"} +{"page_content": "use crate::event::{OnResize, WindowSize};\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options};\n\n// Interest in PTY read/writes.\npub(crate) const PTY_READ_WRITE_TOKEN: usize = 0;\n\n// Interest in new child events.\npub(crate) const PTY_CHILD_EVENT_TOKEN: usize = 1;\n\nmacro_rules! die {\n ($($arg:tt)*) => {{\n error!($($arg)*);\n std::process::exit(1);\n }}\n}\n\n/// Really only needed on BSD, but should be fine elsewhere.\nfn set_controlling_terminal(fd: c_int) {\n let res = unsafe {\n // TIOSCTTY changes based on platform and the `ioctl` call is different\n // based on architecture (32/64). So a generic cast is used to make sure\n // there are no issues. To allow such a generic cast the clippy warning\n // is disabled.\n #[allow(clippy::cast_lossless)]\n libc::ioctl(fd, TIOCSCTTY as _, 0)\n };\n\n\n\nThe chunk is part of the TTY-related functionality in the Rust codebase. 
It includes definitions of constants, a macro, and a function related to setting the controlling terminal for a process.", "metadata": {"chunk_id": "doc_63_chunk_1", "original_index": 1, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_1"}, "type": "Document"} +{"page_content": " if res < 0 {\n die!(\"ioctl TIOCSCTTY failed: {}\", Error::last_os_error());\n }\n}\n\n#[derive(Debug)]\nstruct Passwd<'a> {\n name: &'a str,\n dir: &'a str,\n shell: &'a str,\n}\n\n/// Return a Passwd struct with pointers into the provided buf.\n///\n/// # Unsafety\n///\n/// If `buf` is changed while `Passwd` is alive, bad thing will almost certainly happen.\nfn get_pw_entry(buf: &mut [i8; 1024]) -> Result> {\n // Create zeroed passwd struct.\n let mut entry: MaybeUninit = MaybeUninit::uninit();\n\n let mut res: *mut libc::passwd = ptr::null_mut();\n\n // Try and read the pw file.\n let uid = unsafe { libc::getuid() };\n let status = unsafe {\n libc::getpwuid_r(uid, entry.as_mut_ptr(), buf.as_mut_ptr() as *mut _, buf.len(), &mut res)\n };\n let entry = unsafe { entry.assume_init() };\n\n\n\nThe chunk is part of a TTY-related functionality implementation in Rust. It contains a function `set_controlling_terminal` that sets the controlling terminal for a file descriptor, and a function `get_pw_entry` that retrieves the user's password entry information.", "metadata": {"chunk_id": "doc_63_chunk_2", "original_index": 2, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_2"}, "type": "Document"} +{"page_content": " if status < 0 {\n return Err(Error::new(ErrorKind::Other, \"getpwuid_r failed\"));\n }\n\n if res.is_null() {\n return Err(Error::new(ErrorKind::Other, \"pw not found\"));\n }\n\n // Sanity check.\n assert_eq!(entry.pw_uid, uid);\n\n // Build a borrowed Passwd struct.\n Ok(Passwd {\n name: unsafe { CStr::from_ptr(entry.pw_name).to_str().unwrap() },\n dir: unsafe { CStr::from_ptr(entry.pw_dir).to_str().unwrap() },\n shell: unsafe { CStr::from_ptr(entry.pw_shell).to_str().unwrap() },\n })\n}\n\n\n\nThe chunk is part of the `get_pw_entry` function, which is responsible for retrieving the user's password entry information, including the username, home directory, and shell.", "metadata": {"chunk_id": "doc_63_chunk_3", "original_index": 3, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_3"}, "type": "Document"} +{"page_content": "pub struct Pty {\n child: Child,\n file: File,\n signals: UnixStream,\n}\n\nimpl Pty {\n pub fn child(&self) -> &Child {\n &self.child\n }\n\n pub fn file(&self) -> &File {\n &self.file\n }\n}\n\n/// User information that is required for a new shell session.\nstruct ShellUser {\n user: String,\n home: String,\n shell: String,\n}\n\nimpl ShellUser {\n /// look for shell, username, longname, and home dir in the respective environment variables\n /// before falling back on looking in to `passwd`.\n fn from_env() -> Result {\n let mut buf = [0; 1024];\n let pw = get_pw_entry(&mut buf);\n\n\n\nThe chunk represents the definition of the `Pty` struct and the `ShellUser` struct, which are used to manage the creation and interaction with a PTY (Pseudo-Terminal) in the TTY-related functionality of the application.", "metadata": {"chunk_id": "doc_63_chunk_4", "original_index": 4, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_4"}, "type": "Document"} +{"page_content": " let user = match env::var(\"USER\") {\n Ok(user) => user,\n Err(_) => match pw {\n Ok(ref pw) => pw.name.to_owned(),\n Err(err) => 
return Err(err),\n },\n };\n\n let home = match env::var(\"HOME\") {\n Ok(home) => home,\n Err(_) => match pw {\n Ok(ref pw) => pw.dir.to_owned(),\n Err(err) => return Err(err),\n },\n };\n\n let shell = match env::var(\"SHELL\") {\n Ok(shell) => shell,\n Err(_) => match pw {\n Ok(ref pw) => pw.shell.to_owned(),\n Err(err) => return Err(err),\n },\n };\n\n Ok(Self { user, home, shell })\n }\n}\n\n#[cfg(not(target_os = \"macos\"))]\nfn default_shell_command(shell: &str, _user: &str) -> Command {\n Command::new(shell)\n}\n\n\n\nThe chunk is part of the `ShellUser` struct implementation, which is responsible for retrieving user information required for a new shell session.", "metadata": {"chunk_id": "doc_63_chunk_5", "original_index": 5, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_5"}, "type": "Document"} +{"page_content": "#[cfg(target_os = \"macos\")]\nfn default_shell_command(shell: &str, user: &str) -> Command {\n let shell_name = shell.rsplit('/').next().unwrap();\n\n // On macOS, use the `login` command so the shell will appear as a tty session.\n let mut login_command = Command::new(\"/usr/bin/login\");\n\n // Exec the shell with argv[0] prepended by '-' so it becomes a login shell.\n // `login` normally does this itself, but `-l` disables this.\n let exec = format!(\"exec -a -{} {}\", shell_name, shell);\n\n\n\nThe chunk is part of the `default_shell_command` function that is used to create a new shell process on macOS systems. This function is responsible for setting up the appropriate command and arguments to launch the user's default shell as a login shell.", "metadata": {"chunk_id": "doc_63_chunk_6", "original_index": 6, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_6"}, "type": "Document"} +{"page_content": " // -f: Bypasses authentication for the already-logged-in user.\n // -l: Skips changing directory to $HOME and prepending '-' to argv[0].\n // -p: Preserves the environment.\n //\n // XXX: we use zsh here over sh due to `exec -a`.\n login_command.args([\"-flp\", user, \"/bin/zsh\", \"-c\", &exec]);\n login_command\n}\n\n/// Create a new TTY and return a handle to interact with it.\npub fn new(config: &Options, window_size: WindowSize, window_id: u64) -> Result {\n let pty = openpty(None, Some(&window_size.to_winsize()))?;\n let (master, slave) = (pty.controller, pty.user);\n let master_fd = master.as_raw_fd();\n let slave_fd = slave.as_raw_fd();\n\n\n\nThe chunk describes the configuration and setup of a new TTY (terminal) session, including the command-line arguments passed to the login command and the creation of the PTY (pseudo-terminal) using the `openpty` function.", "metadata": {"chunk_id": "doc_63_chunk_7", "original_index": 7, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_7"}, "type": "Document"} +{"page_content": " #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n if let Ok(mut termios) = termios::tcgetattr(&master) {\n // Set character encoding to UTF-8.\n termios.input_modes.set(InputModes::IUTF8, true);\n let _ = termios::tcsetattr(&master, OptionalActions::Now, &termios);\n }\n\n let user = ShellUser::from_env()?;\n\n let mut builder = if let Some(shell) = config.shell.as_ref() {\n let mut cmd = Command::new(&shell.program);\n cmd.args(shell.args.as_slice());\n cmd\n } else {\n default_shell_command(&user.shell, &user.user)\n };\n\n\n\nThis chunk sets up the shell environment and configuration for the new PTY (Pseudo-Terminal) process.", "metadata": {"chunk_id": 
"doc_63_chunk_8", "original_index": 8, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_8"}, "type": "Document"} +{"page_content": " // Setup child stdin/stdout/stderr as slave fd of PTY.\n // Ownership of fd is transferred to the Stdio structs and will be closed by them at the end of\n // this scope. (It is not an issue that the fd is closed three times since File::drop ignores\n // error on libc::close.).\n builder.stdin(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stderr(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stdout(unsafe { Stdio::from_raw_fd(slave_fd) });\n\n\n\nThe chunk sets up the child process's stdin, stdout, and stderr to use the slave file descriptor of the PTY.", "metadata": {"chunk_id": "doc_63_chunk_9", "original_index": 9, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_9"}, "type": "Document"} +{"page_content": " // Setup shell environment.\n let window_id = window_id.to_string();\n builder.env(\"ALACRITTY_WINDOW_ID\", &window_id);\n builder.env(\"USER\", user.user);\n builder.env(\"HOME\", user.home);\n // Set Window ID for clients relying on X11 hacks.\n builder.env(\"WINDOWID\", window_id);\n for (key, value) in &config.env {\n builder.env(key, value);\n }\n\n unsafe {\n builder.pre_exec(move || {\n // Create a new process group.\n let err = libc::setsid();\n if err == -1 {\n return Err(Error::new(ErrorKind::Other, \"Failed to set session id\"));\n }\n\n set_controlling_terminal(slave_fd);\n\n // No longer need slave/master fds.\n libc::close(slave_fd);\n libc::close(master_fd);\n\n\n\nThis chunk sets up the shell environment and performs necessary setup for the child process before spawning it.", "metadata": {"chunk_id": "doc_63_chunk_10", "original_index": 10, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_10"}, "type": "Document"} +{"page_content": " libc::signal(libc::SIGCHLD, libc::SIG_DFL);\n libc::signal(libc::SIGHUP, libc::SIG_DFL);\n libc::signal(libc::SIGINT, libc::SIG_DFL);\n libc::signal(libc::SIGQUIT, libc::SIG_DFL);\n libc::signal(libc::SIGTERM, libc::SIG_DFL);\n libc::signal(libc::SIGALRM, libc::SIG_DFL);\n\n Ok(())\n });\n }\n\n // Handle set working directory option.\n if let Some(dir) = &config.working_directory {\n builder.current_dir(dir);\n }\n\n // Prepare signal handling before spawning child.\n let signals = {\n let (sender, recv) = UnixStream::pair()?;\n\n\n\nThe chunk is part of the `new` function in the `Pty` struct, which is responsible for creating a new TTY and returning a handle to interact with it. The chunk sets up signal handling for the child process before spawning it.", "metadata": {"chunk_id": "doc_63_chunk_11", "original_index": 11, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_11"}, "type": "Document"} +{"page_content": " // Register the recv end of the pipe for SIGCHLD.\n signal_pipe::register(sigconsts::SIGCHLD, sender)?;\n recv.set_nonblocking(true)?;\n recv\n };\n\n match builder.spawn() {\n Ok(child) => {\n unsafe {\n // Maybe this should be done outside of this function so nonblocking\n // isn't forced upon consumers. 
Although maybe it should be?\n set_nonblocking(master_fd);\n }\n\n Ok(Pty { child, file: File::from(master), signals })\n },\n Err(err) => Err(Error::new(\n err.kind(),\n format!(\n \"Failed to spawn command '{}': {}\",\n builder.get_program().to_string_lossy(),\n err\n ),\n )),\n }\n}\n\n\n\nThis chunk is part of the `new()` function in the `Pty` struct, which is responsible for creating a new TTY and returning a handle to interact with it.", "metadata": {"chunk_id": "doc_63_chunk_12", "original_index": 12, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_12"}, "type": "Document"} +{"page_content": "impl Drop for Pty {\n    fn drop(&mut self) {\n        // Make sure the PTY is terminated properly.\n        unsafe {\n            libc::kill(self.child.id() as i32, libc::SIGHUP);\n        }\n        let _ = self.child.wait();\n    }\n}\n\nimpl EventedReadWrite for Pty {\n    type Reader = File;\n    type Writer = File;\n\n    #[inline]\n    unsafe fn register(\n        &mut self,\n        poll: &Arc<Poller>,\n        mut interest: Event,\n        poll_opts: PollMode,\n    ) -> Result<()> {\n        interest.key = PTY_READ_WRITE_TOKEN;\n        unsafe {\n            poll.add_with_mode(&self.file, interest, poll_opts)?;\n        }\n\n\n\nThe chunk is part of the implementation of the `Pty` struct, which is responsible for managing the PTY (Pseudo-Terminal) and its associated child process. The `Drop` implementation ensures that the PTY is terminated properly when the `Pty` instance is dropped. The `EventedReadWrite` implementation provides methods for registering and deregistering the PTY's file and signal pipe with a `Poller` for event-driven I/O.", "metadata": {"chunk_id": "doc_63_chunk_13", "original_index": 13, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_13"}, "type": "Document"} +{"page_content": "        unsafe {\n            poll.add_with_mode(\n                &self.signals,\n                Event::readable(PTY_CHILD_EVENT_TOKEN),\n                PollMode::Level,\n            )\n        }\n    }\n\n    #[inline]\n    fn reregister(\n        &mut self,\n        poll: &Arc<Poller>,\n        mut interest: Event,\n        poll_opts: PollMode,\n    ) -> Result<()> {\n        interest.key = PTY_READ_WRITE_TOKEN;\n        poll.modify_with_mode(&self.file, interest, poll_opts)?;\n\n\n\nThe provided chunk is part of the `EventedReadWrite` trait implementation for the `Pty` struct, which is responsible for registering and re-registering the PTY's file and signal pipe with the event poller.", "metadata": {"chunk_id": "doc_63_chunk_14", "original_index": 14, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_14"}, "type": "Document"} +{"page_content": "        poll.modify_with_mode(\n            &self.signals,\n            Event::readable(PTY_CHILD_EVENT_TOKEN),\n            PollMode::Level,\n        )\n    }\n\n    #[inline]\n    fn deregister(&mut self, poll: &Arc<Poller>) -> Result<()> {\n        poll.delete(&self.file)?;\n        poll.delete(&self.signals)\n    }\n\n    #[inline]\n    fn reader(&mut self) -> &mut File {\n        &mut self.file\n    }\n\n    #[inline]\n    fn writer(&mut self) -> &mut File {\n        &mut self.file\n    }\n}\n\n\n\nThe provided chunk is part of the implementation of the `EventedReadWrite` trait for the `Pty` struct, which is responsible for managing the read and write operations of a PTY (Pseudo-Terminal) in the TTY-related functionality of the application.", "metadata": {"chunk_id": "doc_63_chunk_15", "original_index": 15, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_15"}, "type": "Document"} +{"page_content": "impl EventedPty for Pty {\n    #[inline]\n    fn next_child_event(&mut self) -> Option<ChildEvent> {\n        // See if there has been a SIGCHLD.\n        let mut buf = [0u8; 1];\n        if let Err(err) = self.signals.read(&mut buf) {\n            if err.kind() != 
ErrorKind::WouldBlock {\n                error!(\"Error reading from signal pipe: {}\", err);\n            }\n            return None;\n        }\n\n        // Match on the child process.\n        match self.child.try_wait() {\n            Err(err) => {\n                error!(\"Error checking child process termination: {}\", err);\n                None\n            },\n            Ok(None) => None,\n            Ok(exit_status) => Some(ChildEvent::Exited(exit_status.and_then(|s| s.code()))),\n        }\n    }\n}\n\n\n\nThe provided chunk is an implementation of the `EventedPty` trait for the `Pty` struct, which is responsible for handling the next child event in the terminal emulator.", "metadata": {"chunk_id": "doc_63_chunk_16", "original_index": 16, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_16"}, "type": "Document"} +{"page_content": "impl OnResize for Pty {\n    /// Resize the PTY.\n    ///\n    /// Tells the kernel that the window size changed with the new pixel\n    /// dimensions and line/column counts.\n    fn on_resize(&mut self, window_size: WindowSize) {\n        let win = window_size.to_winsize();\n\n        let res = unsafe { libc::ioctl(self.file.as_raw_fd(), libc::TIOCSWINSZ, &win as *const _) };\n\n        if res < 0 {\n            die!(\"ioctl TIOCSWINSZ failed: {}\", Error::last_os_error());\n        }\n    }\n}\n\n\n\nThe chunk is part of the implementation of the `Pty` struct, which is responsible for managing a PTY (Pseudo-Terminal) and its associated child process. The `on_resize` method is used to resize the PTY when the window size changes, by updating the kernel with the new pixel dimensions and line/column counts.", "metadata": {"chunk_id": "doc_63_chunk_17", "original_index": 17, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_17"}, "type": "Document"} +{"page_content": "/// Types that can produce a `Winsize`.\npub trait ToWinsize {\n    /// Get a `Winsize`.\n    fn to_winsize(self) -> Winsize;\n}\n\nimpl ToWinsize for WindowSize {\n    fn to_winsize(self) -> Winsize {\n        let ws_row = self.num_lines as libc::c_ushort;\n        let ws_col = self.num_cols as libc::c_ushort;\n\n        let ws_xpixel = ws_col * self.cell_width as libc::c_ushort;\n        let ws_ypixel = ws_row * self.cell_height as libc::c_ushort;\n        Winsize { ws_row, ws_col, ws_xpixel, ws_ypixel }\n    }\n}\n\nunsafe fn set_nonblocking(fd: c_int) {\n    use libc::{fcntl, F_GETFL, F_SETFL, O_NONBLOCK};\n\n    let res = fcntl(fd, F_SETFL, fcntl(fd, F_GETFL, 0) | O_NONBLOCK);\n    assert_eq!(res, 0);\n}\n\n#[test]\nfn test_get_pw_entry() {\n    let mut buf: [i8; 1024] = [0; 1024];\n    let _pw = get_pw_entry(&mut buf).unwrap();\n}\n\n\nThe chunk provides utility functions and traits related to terminal window size and non-blocking file descriptor operations, as well as a test for the `get_pw_entry` function.", "metadata": {"chunk_id": "doc_63_chunk_18", "original_index": 18, "pid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_18"}, "type": "Document"} +{"page_content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n    pub hide_when_typing: bool,\n    pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec<MouseBinding>);\n\nimpl Default for MouseBindings {\n    fn default() -> Self {\n        Self(bindings::default_mouse_bindings())\n    }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n    where\n        D: Deserializer<'de>,\n    {\n        
Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n    }\n}\n\n\nThis chunk defines a `Mouse` struct and a `MouseBindings` struct, which are used to configure the mouse behavior in an Alacritty terminal emulator application.", "metadata": {"chunk_id": "doc_64_chunk_0", "original_index": 0, "pid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7_0"}, "type": "Document"} +{"page_content": "use log::{debug, warn};\nuse raw_window_handle::RawDisplayHandle;\n\nuse alacritty_terminal::term::ClipboardType;\n\n#[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\nuse copypasta::nop_clipboard::NopClipboardContext;\n#[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::wayland_clipboard;\n#[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::x11_clipboard::{Primary as X11SelectionClipboard, X11ClipboardContext};\n#[cfg(any(feature = \"x11\", target_os = \"macos\", windows))]\nuse copypasta::ClipboardContext;\nuse copypasta::ClipboardProvider;\n\n\n\nThe chunk contains the import statements and related configuration for the clipboard functionality in the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_65_chunk_0", "original_index": 0, "pid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_0"}, "type": "Document"} +{"page_content": "pub struct Clipboard {\n    clipboard: Box<dyn ClipboardProvider>,\n    selection: Option<Box<dyn ClipboardProvider>>,\n}\n\nimpl Clipboard {\n    pub unsafe fn new(display: RawDisplayHandle) -> Self {\n        match display {\n            #[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\n            RawDisplayHandle::Wayland(display) => {\n                let (selection, clipboard) =\n                    wayland_clipboard::create_clipboards_from_external(display.display);\n                Self { clipboard: Box::new(clipboard), selection: Some(Box::new(selection)) }\n            },\n            _ => Self::default(),\n        }\n    }\n\n\n\nThe provided chunk defines the `Clipboard` struct and its `new` method, which is responsible for creating a new clipboard instance based on the provided display handle.", "metadata": {"chunk_id": "doc_65_chunk_1", "original_index": 1, "pid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_1"}, "type": "Document"} +{"page_content": "    /// Used for tests and to handle missing clipboard provider when built without the `x11`\n    /// feature.\n    #[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\n    pub fn new_nop() -> Self {\n        Self { clipboard: Box::new(NopClipboardContext::new().unwrap()), selection: None }\n    }\n}\n\nimpl Default for Clipboard {\n    fn default() -> Self {\n        #[cfg(any(target_os = \"macos\", windows))]\n        return Self { clipboard: Box::new(ClipboardContext::new().unwrap()), selection: None };\n\n\n\nThe provided chunk is part of the implementation of the `Clipboard` struct, which is responsible for managing clipboard operations in the Alacritty terminal emulator. The chunk includes a `new_nop()` method that is used for tests and to handle missing clipboard providers when the `x11` feature is not enabled. 
The `Default` implementation for the `Clipboard` struct is also included in this chunk, which sets up the appropriate clipboard provider based on the target operating system.", "metadata": {"chunk_id": "doc_65_chunk_2", "original_index": 2, "pid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_2"}, "type": "Document"} +{"page_content": "        #[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\n        return Self {\n            clipboard: Box::new(ClipboardContext::new().unwrap()),\n            selection: Some(Box::new(X11ClipboardContext::<X11SelectionClipboard>::new().unwrap())),\n        };\n\n        #[cfg(not(any(feature = \"x11\", target_os = \"macos\", windows)))]\n        return Self::new_nop();\n    }\n}\n\nimpl Clipboard {\n    pub fn store(&mut self, ty: ClipboardType, text: impl Into<String>) {\n        let clipboard = match (ty, &mut self.selection) {\n            (ClipboardType::Selection, Some(provider)) => provider,\n            (ClipboardType::Selection, None) => return,\n            _ => &mut self.clipboard,\n        };\n\n\n\nThe provided chunk is part of the `Clipboard` struct implementation, specifically the `Default` trait implementation and the `store` method.", "metadata": {"chunk_id": "doc_65_chunk_3", "original_index": 3, "pid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_3"}, "type": "Document"} +{"page_content": "        clipboard.set_contents(text.into()).unwrap_or_else(|err| {\n            warn!(\"Unable to store text in clipboard: {}\", err);\n        });\n    }\n\n    pub fn load(&mut self, ty: ClipboardType) -> String {\n        let clipboard = match (ty, &mut self.selection) {\n            (ClipboardType::Selection, Some(provider)) => provider,\n            _ => &mut self.clipboard,\n        };\n\n        match clipboard.get_contents() {\n            Err(err) => {\n                debug!(\"Unable to load text from clipboard: {}\", err);\n                String::new()\n            },\n            Ok(text) => text,\n        }\n    }\n}\n\n\nThe provided chunk is part of the `Clipboard` struct implementation, which handles the storage and retrieval of clipboard data for different clipboard types (e.g., primary selection, clipboard) across various platforms (e.g., Wayland, X11, macOS, Windows).", "metadata": {"chunk_id": "doc_65_chunk_4", "original_index": 4, "pid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_4"}, "type": "Document"} +{"page_content": "//! 
Scheduler for emitting events at a specific time in the future.\n\nuse std::collections::VecDeque;\nuse std::time::{Duration, Instant};\n\nuse winit::event_loop::EventLoopProxy;\nuse winit::window::WindowId;\n\nuse crate::event::Event;\n\n/// ID uniquely identifying a timer.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub struct TimerId {\n    topic: Topic,\n    window_id: WindowId,\n}\n\nimpl TimerId {\n    pub fn new(topic: Topic, window_id: WindowId) -> Self {\n        Self { topic, window_id }\n    }\n}\n\n/// Available timer topics.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub enum Topic {\n    SelectionScrolling,\n    DelayedSearch,\n    BlinkCursor,\n    BlinkTimeout,\n    Frame,\n}\n\n/// Event scheduled to be emitted at a specific time.\npub struct Timer {\n    pub deadline: Instant,\n    pub event: Event,\n    pub id: TimerId,\n\n\n\nThis chunk defines the data structures and types used by the Scheduler module, which is responsible for emitting events at a specific time in the future.", "metadata": {"chunk_id": "doc_66_chunk_0", "original_index": 0, "pid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_0"}, "type": "Document"} +{"page_content": "    interval: Option<Duration>,\n}\n\n/// Scheduler tracking all pending timers.\npub struct Scheduler {\n    timers: VecDeque<Timer>,\n    event_proxy: EventLoopProxy<Event>,\n}\n\nimpl Scheduler {\n    pub fn new(event_proxy: EventLoopProxy<Event>) -> Self {\n        Self { timers: VecDeque::new(), event_proxy }\n    }\n\n    /// Process all pending timers.\n    ///\n    /// If there are still timers pending after all ready events have been processed, the closest\n    /// pending deadline will be returned.\n    pub fn update(&mut self) -> Option<Instant> {\n        let now = Instant::now();\n\n\n\nThe provided chunk is part of a Rust module that implements a scheduler for emitting events at a specific time in the future. 
The chunk defines the `Timer` struct, which represents an event scheduled to be emitted at a specific time, and the `Scheduler` struct, which is responsible for managing and processing all pending timers.", "metadata": {"chunk_id": "doc_66_chunk_1", "original_index": 1, "pid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_1"}, "type": "Document"} +{"page_content": "        while !self.timers.is_empty() && self.timers[0].deadline <= now {\n            if let Some(timer) = self.timers.pop_front() {\n                // Automatically repeat the event.\n                if let Some(interval) = timer.interval {\n                    self.schedule(timer.event.clone(), interval, true, timer.id);\n                }\n\n                let _ = self.event_proxy.send_event(timer.event);\n            }\n        }\n\n        self.timers.front().map(|timer| timer.deadline)\n    }\n\n    /// Schedule a new event.\n    pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n        let deadline = Instant::now() + interval;\n\n\n\nThe provided chunk is part of the `Scheduler` struct implementation, which is responsible for managing the scheduling and execution of events at specific times in the future.", "metadata": {"chunk_id": "doc_66_chunk_2", "original_index": 2, "pid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_2"}, "type": "Document"} +{"page_content": "        // Get insert position in the schedule.\n        let index = self\n            .timers\n            .iter()\n            .position(|timer| timer.deadline > deadline)\n            .unwrap_or(self.timers.len());\n\n        // Set the automatic event repeat rate.\n        let interval = if repeat { Some(interval) } else { None };\n\n        self.timers.insert(index, Timer { interval, deadline, event, id: timer_id });\n    }\n\n\n\nThe provided chunk is part of the `schedule` method of the `Scheduler` struct, which is responsible for scheduling a new event to be emitted at a specific time in the future.", "metadata": {"chunk_id": "doc_66_chunk_3", "original_index": 3, "pid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_3"}, "type": "Document"} +{"page_content": "    /// Cancel a scheduled event.\n    pub fn unschedule(&mut self, id: TimerId) -> Option<Timer> {\n        let index = self.timers.iter().position(|timer| timer.id == id)?;\n        self.timers.remove(index)\n    }\n\n    /// Check if a timer is already scheduled.\n    pub fn scheduled(&mut self, id: TimerId) -> bool {\n        self.timers.iter().any(|timer| timer.id == id)\n    }\n\n    /// Remove all timers scheduled for a window.\n    ///\n    /// This must be called when a window is removed to ensure that timers on intervals do not\n    /// stick around forever and cause a memory leak.\n    pub fn unschedule_window(&mut self, window_id: WindowId) {\n        self.timers.retain(|timer| timer.id.window_id != window_id);\n    }\n}\n\n\nThe provided chunk is part of the implementation of a Scheduler struct, which is responsible for managing the scheduling and execution of timed events. 
The chunk includes methods for canceling a scheduled event, checking if a timer is already scheduled, and removing all timers scheduled for a specific window.", "metadata": {"chunk_id": "doc_66_chunk_4", "original_index": 4, "pid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_4"}, "type": "Document"} +{"page_content": "use std::io::Write;\nuse std::{io, panic};\n\nuse windows_sys::Win32::UI::WindowsAndMessaging::{\n    MessageBoxW, MB_ICONERROR, MB_OK, MB_SETFOREGROUND, MB_TASKMODAL,\n};\n\nuse alacritty_terminal::tty::windows::win32_string;\n\n// Install a panic handler that renders the panic in a classical Windows error\n// dialog box as well as writes the panic to STDERR.\npub fn attach_handler() {\n    panic::set_hook(Box::new(|panic_info| {\n        let _ = writeln!(io::stderr(), \"{}\", panic_info);\n        let msg = format!(\"{}\\n\\nPress Ctrl-C to Copy\", panic_info);\n        unsafe {\n            MessageBoxW(\n                0isize,\n                win32_string(&msg).as_ptr(),\n                win32_string(\"Alacritty: Runtime Error\").as_ptr(),\n                MB_ICONERROR | MB_OK | MB_SETFOREGROUND | MB_TASKMODAL,\n            );\n        }\n    }));\n}\n\n\nThis chunk of code is a Rust implementation of a panic handler that displays a Windows error dialog box and writes the panic information to the standard error stream. It is likely part of a larger application or library that runs on the Windows operating system.", "metadata": {"chunk_id": "doc_67_chunk_0", "original_index": 0, "pid": "6e284600be25c8833b866ef0ebfab953a8d0a0f8420cfe56fa17e28664de1b82_0"}, "type": "Document"} +{"page_content": "//! Defines the Row type which makes up lines in the grid.\n\nuse std::cmp::{max, min};\nuse std::ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo, RangeToInclusive};\nuse std::{ptr, slice};\n\n#[cfg(feature = \"serde\")]\nuse serde::{Deserialize, Serialize};\n\nuse crate::grid::GridCell;\nuse crate::index::Column;\nuse crate::term::cell::ResetDiscriminant;\n\n/// A row in the grid.\n#[derive(Default, Clone, Debug)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub struct Row<T> {\n    inner: Vec<T>,\n\n\n\nThe provided chunk defines the `Row` type, which represents a row in a grid. The `Row` type is part of a larger codebase that deals with terminal grid management.", "metadata": {"chunk_id": "doc_68_chunk_0", "original_index": 0, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_0"}, "type": "Document"} +{"page_content": "    /// Maximum number of occupied entries.\n    ///\n    /// This is the upper bound on the number of elements in the row, which have been modified\n    /// since the last reset. All cells after this point are guaranteed to be equal.\n    pub(crate) occ: usize,\n}\n\nimpl<T: PartialEq> PartialEq for Row<T> {\n    fn eq(&self, other: &Self) -> bool {\n        self.inner == other.inner\n    }\n}\n\nimpl<T: Clone + Default> Row<T> {\n    /// Create a new terminal row.\n    ///\n    /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.\n    pub fn new(columns: usize) -> Row<T> {\n        debug_assert!(columns >= 1);\n\n\n\nThe provided chunk defines the `Row` type, which represents a row in a grid. 
It includes the `occ` field that tracks the maximum number of occupied entries in the row, as well as the implementation of the `PartialEq` trait and the `new` method for creating a new `Row` instance.", "metadata": {"chunk_id": "doc_68_chunk_1", "original_index": 1, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_1"}, "type": "Document"} +{"page_content": "        let mut inner: Vec<T> = Vec::with_capacity(columns);\n\n        // This is a slightly optimized version of `std::vec::Vec::resize`.\n        unsafe {\n            let mut ptr = inner.as_mut_ptr();\n\n            for _ in 1..columns {\n                ptr::write(ptr, T::default());\n                ptr = ptr.offset(1);\n            }\n            ptr::write(ptr, T::default());\n\n            inner.set_len(columns);\n        }\n\n        Row { inner, occ: 0 }\n    }\n\n\n\nThe provided chunk is part of the `Row` struct implementation, specifically the `new()` method that creates a new terminal row. This method initializes a `Vec` with the specified number of columns, setting each element to the default value of `T`. The resulting `Vec` is then used to construct a new `Row` instance with the initialized inner vector and an occupancy count of 0.", "metadata": {"chunk_id": "doc_68_chunk_2", "original_index": 2, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_2"}, "type": "Document"} +{"page_content": "    /// Increase the number of columns in the row.\n    #[inline]\n    pub fn grow(&mut self, columns: usize) {\n        if self.inner.len() >= columns {\n            return;\n        }\n\n        self.inner.resize_with(columns, T::default);\n    }\n\n    /// Reduce the number of columns in the row.\n    ///\n    /// This will return all non-empty cells that were removed.\n    pub fn shrink(&mut self, columns: usize) -> Option<Vec<T>>\n    where\n        T: GridCell,\n    {\n        if self.inner.len() <= columns {\n            return None;\n        }\n\n        // Split off cells for a new row.\n        let mut new_row = self.inner.split_off(columns);\n        let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n        new_row.truncate(index);\n\n        self.occ = min(self.occ, columns);\n\n        if new_row.is_empty() {\n            None\n        } else {\n            Some(new_row)\n        }\n    }\n\n\n\nThe provided chunk defines the `grow` and `shrink` methods of the `Row` struct, which are used to increase or decrease the number of columns in the row, respectively. These methods are part of the implementation of the `Row` struct, which represents a row in a grid.", "metadata": {"chunk_id": "doc_68_chunk_3", "original_index": 3, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_3"}, "type": "Document"} +{"page_content": "    /// Reset all cells in the row to the `template` cell.\n    #[inline]\n    pub fn reset<D>(&mut self, template: &T)\n    where\n        T: ResetDiscriminant<D> + GridCell,\n        D: PartialEq,\n    {\n        debug_assert!(!self.inner.is_empty());\n\n        // Mark all cells as dirty if template cell changed.\n        let len = self.inner.len();\n        if self.inner[len - 1].discriminant() != template.discriminant() {\n            self.occ = len;\n        }\n\n        // Reset every dirty cell in the row.\n        for item in &mut self.inner[0..self.occ] {\n            item.reset(template);\n        }\n\n        self.occ = 0;\n    }\n}\n\n#[allow(clippy::len_without_is_empty)]\nimpl<T> Row<T> {\n    #[inline]\n    pub fn from_vec(vec: Vec<T>, occ: usize) -> Row<T> {\n        Row { inner: vec, occ }\n    }\n\n\n\nThe provided chunk defines the `reset` method for the `Row` struct, which is responsible for resetting all cells in the row to a specified template cell. 
This method is part of the implementation of the `Row` struct, which represents a row in a grid data structure.", "metadata": {"chunk_id": "doc_68_chunk_4", "original_index": 4, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_4"}, "type": "Document"} +{"page_content": "    #[inline]\n    pub fn len(&self) -> usize {\n        self.inner.len()\n    }\n\n    #[inline]\n    pub fn last(&self) -> Option<&T> {\n        self.inner.last()\n    }\n\n    #[inline]\n    pub fn last_mut(&mut self) -> Option<&mut T> {\n        self.occ = self.inner.len();\n        self.inner.last_mut()\n    }\n\n    #[inline]\n    pub fn append(&mut self, vec: &mut Vec<T>)\n    where\n        T: GridCell,\n    {\n        self.occ += vec.len();\n        self.inner.append(vec);\n    }\n\n    #[inline]\n    pub fn append_front(&mut self, mut vec: Vec<T>) {\n        self.occ += vec.len();\n\n        vec.append(&mut self.inner);\n        self.inner = vec;\n    }\n\n\n\nThis chunk defines several methods for the `Row` struct, including methods to get the length of the row, access the last element, append elements to the end or front of the row, and more. These methods are part of the implementation of the `Row` type, which represents a row in a grid data structure.", "metadata": {"chunk_id": "doc_68_chunk_5", "original_index": 5, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_5"}, "type": "Document"} +{"page_content": "    /// Check if all cells in the row are empty.\n    #[inline]\n    pub fn is_clear(&self) -> bool\n    where\n        T: GridCell,\n    {\n        self.inner.iter().all(GridCell::is_empty)\n    }\n\n    #[inline]\n    pub fn front_split_off(&mut self, at: usize) -> Vec<T> {\n        self.occ = self.occ.saturating_sub(at);\n\n        let mut split = self.inner.split_off(at);\n        std::mem::swap(&mut split, &mut self.inner);\n        split\n    }\n}\n\n\n\nThe provided chunk defines two methods for the `Row` struct: `is_clear()` and `front_split_off()`. 
The `is_clear()` method checks if all cells in the row are empty, while the `front_split_off()` method splits off the front part of the row and returns it as a new vector.", "metadata": {"chunk_id": "doc_68_chunk_6", "original_index": 6, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_6"}, "type": "Document"} +{"page_content": "impl<'a, T> IntoIterator for &'a Row<T> {\n    type IntoIter = slice::Iter<'a, T>;\n    type Item = &'a T;\n\n    #[inline]\n    fn into_iter(self) -> slice::Iter<'a, T> {\n        self.inner.iter()\n    }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Row<T> {\n    type IntoIter = slice::IterMut<'a, T>;\n    type Item = &'a mut T;\n\n    #[inline]\n    fn into_iter(self) -> slice::IterMut<'a, T> {\n        self.occ = self.len();\n        self.inner.iter_mut()\n    }\n}\n\n\n\nThe provided chunk defines the `IntoIterator` trait implementations for the `Row` struct, allowing for iteration over the row's elements.", "metadata": {"chunk_id": "doc_68_chunk_7", "original_index": 7, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_7"}, "type": "Document"} +{"page_content": "impl<T> Index<Column> for Row<T> {\n    type Output = T;\n\n    #[inline]\n    fn index(&self, index: Column) -> &T {\n        &self.inner[index.0]\n    }\n}\n\nimpl<T> IndexMut<Column> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: Column) -> &mut T {\n        self.occ = max(self.occ, *index + 1);\n        &mut self.inner[index.0]\n    }\n}\n\nimpl<T> Index<Range<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: Range<Column>) -> &[T] {\n        &self.inner[(index.start.0)..(index.end.0)]\n    }\n}\n\nimpl<T> IndexMut<Range<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: Range<Column>) -> &mut [T] {\n        self.occ = max(self.occ, *index.end);\n        &mut self.inner[(index.start.0)..(index.end.0)]\n    }\n}\n\n\n\nThis chunk defines the implementation of the `Index` and `IndexMut` traits for the `Row` struct, which allows for accessing and modifying individual cells or ranges of cells within the row.", "metadata": {"chunk_id": "doc_68_chunk_8", "original_index": 8, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_8"}, "type": "Document"} +{"page_content": "impl<T> Index<RangeTo<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: RangeTo<Column>) -> &[T] {\n        &self.inner[..(index.end.0)]\n    }\n}\n\nimpl<T> IndexMut<RangeTo<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: RangeTo<Column>) -> &mut [T] {\n        self.occ = max(self.occ, *index.end);\n        &mut self.inner[..(index.end.0)]\n    }\n}\n\n\n\nThe provided chunk defines the implementation of the `Index` and `IndexMut` traits for the `Row` type, specifically for the `RangeTo` index range. 
This allows accessing and mutating a subset of the cells in the row, up to the specified column index.", "metadata": {"chunk_id": "doc_68_chunk_9", "original_index": 9, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_9"}, "type": "Document"} +{"page_content": "impl<T> Index<RangeFrom<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: RangeFrom<Column>) -> &[T] {\n        &self.inner[(index.start.0)..]\n    }\n}\n\nimpl<T> IndexMut<RangeFrom<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: RangeFrom<Column>) -> &mut [T] {\n        self.occ = self.len();\n        &mut self.inner[(index.start.0)..]\n    }\n}\n\nimpl<T> Index<RangeFull> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, _: RangeFull) -> &[T] {\n        &self.inner[..]\n    }\n}\n\nimpl<T> IndexMut<RangeFull> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, _: RangeFull) -> &mut [T] {\n        self.occ = self.len();\n        &mut self.inner[..]\n    }\n}\n\n\n\nThe provided chunk defines the implementation of the `Index` and `IndexMut` traits for the `Row` struct, specifically for the `RangeFrom`, `RangeFull`, and `RangeFull` index types. This allows for efficient indexing and mutation of the row's underlying data.", "metadata": {"chunk_id": "doc_68_chunk_10", "original_index": 10, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_10"}, "type": "Document"} +{"page_content": "impl<T> Index<RangeToInclusive<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: RangeToInclusive<Column>) -> &[T] {\n        &self.inner[..=(index.end.0)]\n    }\n}\n\nimpl<T> IndexMut<RangeToInclusive<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: RangeToInclusive<Column>) -> &mut [T] {\n        self.occ = max(self.occ, *index.end + 1);\n        &mut self.inner[..=(index.end.0)]\n    }\n}\n\n\nThe provided chunk defines the implementation of the `Index` and `IndexMut` traits for the `Row` type, specifically for the `RangeToInclusive` index range. 
This allows for accessing and modifying a range of elements in the `Row` struct, up to and including the specified column index.", "metadata": {"chunk_id": "doc_68_chunk_11", "original_index": 11, "pid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_11"}, "type": "Document"} +{"page_content": "use log::LevelFilter;\n\nuse alacritty_config_derive::ConfigDeserialize;\n\n/// Debugging options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub struct Debug {\n    pub log_level: LevelFilter,\n\n    pub print_events: bool,\n\n    /// Keep the log file after quitting.\n    pub persistent_logging: bool,\n\n    /// Should show render timer.\n    pub render_timer: bool,\n\n    /// Highlight damage information produced by alacritty.\n    pub highlight_damage: bool,\n\n    /// The renderer alacritty should be using.\n    pub renderer: Option<RendererPreference>,\n\n    /// Use EGL as display API if the current platform allows it.\n    pub prefer_egl: bool,\n\n    /// Record ref test.\n    #[config(skip)]\n    pub ref_test: bool,\n}\n\n\n\nThis chunk defines a `Debug` struct that contains various debugging options for the Alacritty terminal emulator, including log level, event printing, persistent logging, render timer, damage highlighting, renderer preference, and EGL usage.", "metadata": {"chunk_id": "doc_69_chunk_0", "original_index": 0, "pid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293_0"}, "type": "Document"} +{"page_content": "impl Default for Debug {\n    fn default() -> Self {\n        Self {\n            log_level: LevelFilter::Warn,\n            print_events: Default::default(),\n            persistent_logging: Default::default(),\n            render_timer: Default::default(),\n            highlight_damage: Default::default(),\n            ref_test: Default::default(),\n            renderer: Default::default(),\n            prefer_egl: Default::default(),\n        }\n    }\n}\n\n/// The renderer configuration options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub enum RendererPreference {\n    /// OpenGL 3.3 renderer.\n    Glsl3,\n\n    /// GLES 2 renderer, with optional extensions like dual source blending.\n    Gles2,\n\n    /// Pure GLES 2 renderer.\n    Gles2Pure,\n}\n\n\nThe chunk represents the implementation of the `Debug` struct and the definition of the `RendererPreference` enum within the overall document, which appears to be related to configuration options for the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_69_chunk_1", "original_index": 1, "pid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293_1"}, "type": "Document"} +{"page_content": "//! The display subsystem including window management, font rasterization, and\n//! 
GPU drawing.\n\nuse std::cmp;\nuse std::fmt::{self, Formatter};\nuse std::mem::{self, ManuallyDrop};\nuse std::num::NonZeroU32;\nuse std::ops::{Deref, DerefMut};\nuse std::time::{Duration, Instant};\n\nuse glutin::context::{NotCurrentContext, PossiblyCurrentContext};\nuse glutin::prelude::*;\nuse glutin::surface::{Surface, SwapInterval, WindowSurface};\n\nuse log::{debug, info};\nuse parking_lot::MutexGuard;\nuse raw_window_handle::RawWindowHandle;\nuse serde::{Deserialize, Serialize};\nuse winit::dpi::PhysicalSize;\nuse winit::keyboard::ModifiersState;\nuse winit::window::CursorIcon;\n\nuse crossfont::{Rasterize, Rasterizer, Size as FontSize};\nuse unicode_width::UnicodeWidthChar;\n\n\n\nThis chunk describes the display subsystem of the Alacritty terminal emulator, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_0", "original_index": 0, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_0"}, "type": "Document"} +{"page_content": "use alacritty_terminal::event::{EventListener, OnResize, WindowSize};\nuse alacritty_terminal::grid::Dimensions as TermDimensions;\nuse alacritty_terminal::index::{Column, Direction, Line, Point};\nuse alacritty_terminal::selection::Selection;\nuse alacritty_terminal::term::cell::Flags;\nuse alacritty_terminal::term::{\n self, point_to_viewport, LineDamageBounds, Term, TermDamage, TermMode, MIN_COLUMNS,\n MIN_SCREEN_LINES,\n};\nuse alacritty_terminal::vte::ansi::{CursorShape, NamedColor};\n\n\n\nThis chunk contains imports related to the terminal and grid dimensions, cursor and selection handling, and terminal modes and damage tracking. It is part of the display subsystem of the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_70_chunk_1", "original_index": 1, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_1"}, "type": "Document"} +{"page_content": "use crate::config::font::Font;\nuse crate::config::window::Dimensions;\n#[cfg(not(windows))]\nuse crate::config::window::StartupMode;\nuse crate::config::UiConfig;\nuse crate::display::bell::VisualBell;\nuse crate::display::color::{List, Rgb};\nuse crate::display::content::{RenderableContent, RenderableCursor};\nuse crate::display::cursor::IntoRects;\nuse crate::display::damage::{damage_y_to_viewport_y, DamageTracker};\nuse crate::display::hint::{HintMatch, HintState};\nuse crate::display::meter::Meter;\nuse crate::display::window::Window;\nuse crate::event::{Event, EventType, Mouse, SearchState};\nuse crate::message_bar::{MessageBuffer, MessageType};\nuse crate::renderer::rects::{RenderLine, RenderLines, RenderRect};\nuse crate::renderer::{self, GlyphCache, Renderer};\nuse crate::scheduler::{Scheduler, TimerId, Topic};\nuse crate::string::{ShortenDirection, StrShortener};\n\n\n\nThis chunk contains the imports and definitions related to the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_2", "original_index": 2, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_2"}, "type": "Document"} +{"page_content": "pub mod color;\npub mod content;\npub mod cursor;\npub mod hint;\npub mod window;\n\nmod bell;\nmod damage;\nmod meter;\n\n/// Label for the forward terminal search bar.\nconst FORWARD_SEARCH_LABEL: &str = \"Search: \";\n\n/// Label for the backward terminal search bar.\nconst BACKWARD_SEARCH_LABEL: &str = \"Backward Search: \";\n\n/// The character used to shorten the visible text like uri preview or 
search regex.\nconst SHORTENER: char = '…';\n\n/// Color which is used to highlight damaged rects when debugging.\nconst DAMAGE_RECT_COLOR: Rgb = Rgb::new(255, 0, 255);\n\n#[derive(Debug)]\npub enum Error {\n    /// Error with window management.\n    Window(window::Error),\n\n\n\nThis chunk contains the module declarations and error definitions for the display subsystem of the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_70_chunk_3", "original_index": 3, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_3"}, "type": "Document"} +{"page_content": "    /// Error dealing with fonts.\n    Font(crossfont::Error),\n\n    /// Error in renderer.\n    Render(renderer::Error),\n\n    /// Error during context operations.\n    Context(glutin::error::Error),\n}\n\nimpl std::error::Error for Error {\n    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n        match self {\n            Error::Window(err) => err.source(),\n            Error::Font(err) => err.source(),\n            Error::Render(err) => err.source(),\n            Error::Context(err) => err.source(),\n        }\n    }\n}\n\n\n\nThe chunk describes the different types of errors that can occur in the display subsystem, including font-related errors, rendering errors, and context-related errors.", "metadata": {"chunk_id": "doc_70_chunk_4", "original_index": 4, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_4"}, "type": "Document"} +{"page_content": "impl fmt::Display for Error {\n    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n        match self {\n            Error::Window(err) => err.fmt(f),\n            Error::Font(err) => err.fmt(f),\n            Error::Render(err) => err.fmt(f),\n            Error::Context(err) => err.fmt(f),\n        }\n    }\n}\n\nimpl From<window::Error> for Error {\n    fn from(val: window::Error) -> Self {\n        Error::Window(val)\n    }\n}\n\nimpl From<crossfont::Error> for Error {\n    fn from(val: crossfont::Error) -> Self {\n        Error::Font(val)\n    }\n}\n\nimpl From<renderer::Error> for Error {\n    fn from(val: renderer::Error) -> Self {\n        Error::Render(val)\n    }\n}\n\nimpl From<glutin::error::Error> for Error {\n    fn from(val: glutin::error::Error) -> Self {\n        Error::Context(val)\n    }\n}\n\n\n\nThis chunk defines the `fmt::Display` implementation for the `Error` enum, as well as the `From` trait implementations for converting various error types into the `Error` enum. It is part of the display subsystem of the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_70_chunk_5", "original_index": 5, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_5"}, "type": "Document"} +{"page_content": "/// Terminal size info.\n#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]\npub struct SizeInfo<T = f32> {\n    /// Terminal window width.\n    width: T,\n\n    /// Terminal window height.\n    height: T,\n\n    /// Width of individual cell.\n    cell_width: T,\n\n    /// Height of individual cell.\n    cell_height: T,\n\n    /// Horizontal window padding.\n    padding_x: T,\n\n    /// Vertical window padding.\n    padding_y: T,\n\n    /// Number of lines in the viewport.\n    screen_lines: usize,\n\n    /// Number of columns in the viewport.\n    columns: usize,\n}\n\n\n\nThe chunk defines a struct called `SizeInfo` that contains information about the terminal window size, including the width, height, cell dimensions, padding, and number of lines and columns in the viewport. 
This struct is used throughout the display subsystem to manage the terminal's dimensions.", "metadata": {"chunk_id": "doc_70_chunk_6", "original_index": 6, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_6"}, "type": "Document"} +{"page_content": "impl From<SizeInfo<f32>> for SizeInfo<u32> {\n    fn from(size_info: SizeInfo<f32>) -> Self {\n        Self {\n            width: size_info.width as u32,\n            height: size_info.height as u32,\n            cell_width: size_info.cell_width as u32,\n            cell_height: size_info.cell_height as u32,\n            padding_x: size_info.padding_x as u32,\n            padding_y: size_info.padding_y as u32,\n            screen_lines: size_info.screen_lines,\n            columns: size_info.screen_lines,\n        }\n    }\n}\n\n\n\nThe chunk is part of the `SizeInfo` struct implementation, which represents terminal size information. It provides a conversion from `SizeInfo<f32>` to `SizeInfo<u32>`.", "metadata": {"chunk_id": "doc_70_chunk_7", "original_index": 7, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_7"}, "type": "Document"} +{"page_content": "impl From<SizeInfo<f32>> for WindowSize {\n    fn from(size_info: SizeInfo<f32>) -> Self {\n        Self {\n            num_cols: size_info.columns() as u16,\n            num_lines: size_info.screen_lines() as u16,\n            cell_width: size_info.cell_width() as u16,\n            cell_height: size_info.cell_height() as u16,\n        }\n    }\n}\n\nimpl<T: Clone + Copy> SizeInfo<T> {\n    #[inline]\n    pub fn width(&self) -> T {\n        self.width\n    }\n\n    #[inline]\n    pub fn height(&self) -> T {\n        self.height\n    }\n\n    #[inline]\n    pub fn cell_width(&self) -> T {\n        self.cell_width\n    }\n\n    #[inline]\n    pub fn cell_height(&self) -> T {\n        self.cell_height\n    }\n\n    #[inline]\n    pub fn padding_x(&self) -> T {\n        self.padding_x\n    }\n\n    #[inline]\n    pub fn padding_y(&self) -> T {\n        self.padding_y\n    }\n}\n\n\n\nThis chunk defines the `SizeInfo` struct and its associated methods, as well as an implementation of the `From` trait to convert `SizeInfo` to `WindowSize`. It is part of the display subsystem in the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_70_chunk_8", "original_index": 8, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_8"}, "type": "Document"} +{"page_content": "impl SizeInfo {\n    #[allow(clippy::too_many_arguments)]\n    pub fn new(\n        width: f32,\n        height: f32,\n        cell_width: f32,\n        cell_height: f32,\n        mut padding_x: f32,\n        mut padding_y: f32,\n        dynamic_padding: bool,\n    ) -> SizeInfo {\n        if dynamic_padding {\n            padding_x = Self::dynamic_padding(padding_x.floor(), width, cell_width);\n            padding_y = Self::dynamic_padding(padding_y.floor(), height, cell_height);\n        }\n\n\n\nThe chunk is part of the `SizeInfo` struct implementation, which is responsible for managing the terminal's size and dimensions.", "metadata": {"chunk_id": "doc_70_chunk_9", "original_index": 9, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_9"}, "type": "Document"} +{"page_content": "        let lines = (height - 2. * padding_y) / cell_height;\n        let screen_lines = cmp::max(lines as usize, MIN_SCREEN_LINES);\n\n        let columns = (width - 2. * padding_x) / cell_width;\n        let columns = cmp::max(columns as usize, MIN_COLUMNS);\n\n        SizeInfo {\n            width,\n            height,\n            cell_width,\n            cell_height,\n            padding_x: padding_x.floor(),\n            padding_y: padding_y.floor(),\n            screen_lines,\n            columns,\n        }\n    }\n\n    #[inline]\n    pub fn reserve_lines(&mut self, count: usize) {\n        self.screen_lines = cmp::max(self.screen_lines.saturating_sub(count), MIN_SCREEN_LINES);\n    }\n\n\n\nThis chunk calculates the size information for the terminal window, including the number of screen lines, columns, and padding. 
It is part of the `SizeInfo` struct implementation within the `display` module of the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_70_chunk_10", "original_index": 10, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_10"}, "type": "Document"} +{"page_content": "    /// Check if coordinates are inside the terminal grid.\n    ///\n    /// The padding, message bar or search are not counted as part of the grid.\n    #[inline]\n    pub fn contains_point(&self, x: usize, y: usize) -> bool {\n        x <= (self.padding_x + self.columns as f32 * self.cell_width) as usize\n            && x > self.padding_x as usize\n            && y <= (self.padding_y + self.screen_lines as f32 * self.cell_height) as usize\n            && y > self.padding_y as usize\n    }\n\n\n\nThe chunk is part of the `SizeInfo` struct, which represents the terminal size information, including the width, height, cell dimensions, and padding. The `contains_point` method checks if the given coordinates are inside the terminal grid, excluding the padding, message bar, and search areas.", "metadata": {"chunk_id": "doc_70_chunk_11", "original_index": 11, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_11"}, "type": "Document"} +{"page_content": "    /// Calculate padding to spread it evenly around the terminal content.\n    #[inline]\n    fn dynamic_padding(padding: f32, dimension: f32, cell_dimension: f32) -> f32 {\n        padding + ((dimension - 2. * padding) % cell_dimension) / 2.\n    }\n}\n\nimpl TermDimensions for SizeInfo {\n    #[inline]\n    fn columns(&self) -> usize {\n        self.columns\n    }\n\n    #[inline]\n    fn screen_lines(&self) -> usize {\n        self.screen_lines\n    }\n\n    #[inline]\n    fn total_lines(&self) -> usize {\n        self.screen_lines()\n    }\n}\n\n\n\nThis chunk defines a function to calculate dynamic padding for the terminal content, and implements the `TermDimensions` trait for the `SizeInfo` struct, which represents the terminal size information.", "metadata": {"chunk_id": "doc_70_chunk_12", "original_index": 12, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_12"}, "type": "Document"} +{"page_content": "#[derive(Default, Clone, Debug, PartialEq, Eq)]\npub struct DisplayUpdate {\n    pub dirty: bool,\n\n    dimensions: Option<PhysicalSize<u32>>,\n    cursor_dirty: bool,\n    font: Option<Font>,\n}\n\nimpl DisplayUpdate {\n    pub fn dimensions(&self) -> Option<PhysicalSize<u32>> {\n        self.dimensions\n    }\n\n    pub fn font(&self) -> Option<&Font> {\n        self.font.as_ref()\n    }\n\n    pub fn cursor_dirty(&self) -> bool {\n        self.cursor_dirty\n    }\n\n    pub fn set_dimensions(&mut self, dimensions: PhysicalSize<u32>) {\n        self.dimensions = Some(dimensions);\n        self.dirty = true;\n    }\n\n    pub fn set_font(&mut self, font: Font) {\n        self.font = Some(font);\n        self.dirty = true;\n    }\n\n    pub fn set_cursor_dirty(&mut self) {\n        self.cursor_dirty = true;\n        self.dirty = true;\n    }\n}\n\n\n\nThe chunk represents a struct called `DisplayUpdate` that is used to track changes in the display, such as dimensions, cursor state, and font changes. 
It is part of the larger `display` module that handles the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_13", "original_index": 13, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_13"}, "type": "Document"} +{"page_content": "/// The display wraps a window, font rasterizer, and GPU renderer.\npub struct Display {\n    pub window: Window,\n\n    pub size_info: SizeInfo,\n\n    /// Hint highlighted by the mouse.\n    pub highlighted_hint: Option<HintMatch>,\n\n    /// Hint highlighted by the vi mode cursor.\n    pub vi_highlighted_hint: Option<HintMatch>,\n\n    pub raw_window_handle: RawWindowHandle,\n\n    /// UI cursor visibility for blinking.\n    pub cursor_hidden: bool,\n\n    pub visual_bell: VisualBell,\n\n    /// Mapped RGB values for each terminal color.\n    pub colors: List,\n\n    /// State of the keyboard hints.\n    pub hint_state: HintState,\n\n    /// Unprocessed display updates.\n    pub pending_update: DisplayUpdate,\n\n    /// The renderer update that takes place only once before the actual rendering.\n    pub pending_renderer_update: Option<RendererUpdate>,\n\n\n\nThe chunk describes the `Display` struct, which is the main component responsible for the display subsystem in the application, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_14", "original_index": 14, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_14"}, "type": "Document"} +{"page_content": "    /// The ime on the given display.\n    pub ime: Ime,\n\n    /// The state of the timer for frame scheduling.\n    pub frame_timer: FrameTimer,\n\n    /// Damage tracker for the given display.\n    pub damage_tracker: DamageTracker,\n\n    /// Font size used by the window.\n    pub font_size: FontSize,\n\n    // Mouse point position when highlighting hints.\n    hint_mouse_point: Option<Point>,\n\n    renderer: ManuallyDrop<Renderer>,\n\n    surface: ManuallyDrop<Surface<WindowSurface>>,\n\n    context: ManuallyDrop<Replaceable<PossiblyCurrentContext>>,\n\n    glyph_cache: GlyphCache,\n    meter: Meter,\n}\n\nimpl Display {\n    pub fn new(\n        window: Window,\n        gl_context: NotCurrentContext,\n        config: &UiConfig,\n        _tabbed: bool,\n    ) -> Result<Display, Error> {\n        let raw_window_handle = window.raw_window_handle();\n\n        let scale_factor = window.scale_factor as f32;\n        let rasterizer = Rasterizer::new()?;\n\n\n\nThis chunk represents the fields and initialization of the `Display` struct, which is responsible for managing the display subsystem including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_15", "original_index": 15, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_15"}, "type": "Document"} +{"page_content": "        let font_size = config.font.size().scale(scale_factor);\n        debug!(\"Loading \\\"{}\\\" font\", &config.font.normal().family);\n        let font = config.font.clone().with_size(font_size);\n        let mut glyph_cache = GlyphCache::new(rasterizer, &font)?;\n\n        let metrics = glyph_cache.font_metrics();\n        let (cell_width, cell_height) = compute_cell_size(config, &metrics);\n\n        // Resize the window to account for the user configured size.\n        if let Some(dimensions) = config.window.dimensions() {\n            let size = window_size(config, dimensions, cell_width, cell_height, scale_factor);\n            window.request_inner_size(size);\n        }\n\n\n\nThis chunk is responsible for loading the font, computing the cell size, and resizing the window to account for the user-configured size.", "metadata": {"chunk_id": "doc_70_chunk_16", "original_index": 16, "pid": 
"96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_16"}, "type": "Document"} +{"page_content": " // Create the GL surface to draw into.\n let surface = renderer::platform::create_gl_surface(\n &gl_context,\n window.inner_size(),\n window.raw_window_handle(),\n )?;\n\n // Make the context current.\n let context = gl_context.make_current(&surface)?;\n\n // Create renderer.\n let mut renderer = Renderer::new(&context, config.debug.renderer)?;\n\n\n\nThe chunk is situated within the `Display` struct's `new` method, which is responsible for initializing the display subsystem, including the window, font rasterizer, and GPU renderer.", "metadata": {"chunk_id": "doc_70_chunk_17", "original_index": 17, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_17"}, "type": "Document"} +{"page_content": " // Load font common glyphs to accelerate rendering.\n debug!(\"Filling glyph cache with common glyphs\");\n renderer.with_loader(|mut api| {\n glyph_cache.reset_glyph_cache(&mut api);\n });\n\n let padding = config.window.padding(window.scale_factor as f32);\n let viewport_size = window.inner_size();\n\n // Create new size with at least one column and row.\n let size_info = SizeInfo::new(\n viewport_size.width as f32,\n viewport_size.height as f32,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding && config.window.dimensions().is_none(),\n );\n\n\n\nThis chunk is part of the code that initializes the display subsystem, including the window, font rasterizer, and GPU renderer. It loads common glyphs into the glyph cache to accelerate rendering, and then creates a new `SizeInfo` object that represents the size of the terminal window, taking into account the configured padding and dynamic padding if necessary.", "metadata": {"chunk_id": "doc_70_chunk_18", "original_index": 18, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_18"}, "type": "Document"} +{"page_content": " info!(\"Cell size: {} x {}\", cell_width, cell_height);\n info!(\"Padding: {} x {}\", size_info.padding_x(), size_info.padding_y());\n info!(\"Width: {}, Height: {}\", size_info.width(), size_info.height());\n\n // Update OpenGL projection.\n renderer.resize(&size_info);\n\n // Clear screen.\n let background_color = config.colors.primary.background;\n renderer.clear(background_color, config.window_opacity());\n\n // Disable shadows for transparent windows on macOS.\n #[cfg(target_os = \"macos\")]\n window.set_has_shadow(config.window_opacity() >= 1.0);\n\n let is_wayland = matches!(raw_window_handle, RawWindowHandle::Wayland(_));\n\n\n\nThis chunk is part of the `Display` struct's `new` method, which is responsible for initializing the display subsystem, including the window, font rasterizer, and GPU renderer.", "metadata": {"chunk_id": "doc_70_chunk_19", "original_index": 19, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_19"}, "type": "Document"} +{"page_content": " // On Wayland we can safely ignore this call, since the window isn't visible until you\n // actually draw something into it and commit those changes.\n if !is_wayland {\n surface.swap_buffers(&context).expect(\"failed to swap buffers.\");\n renderer.finish();\n }\n\n // Set resize increments for the newly created window.\n if config.window.resize_increments {\n window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n window.set_visible(true);\n\n\n\nThis chunk is part of the `Display` module, which is responsible for managing the 
display subsystem, including window management, font rasterization, and GPU drawing. The chunk is part of the `Display::new` function, which initializes a new `Display` instance.", "metadata": {"chunk_id": "doc_70_chunk_20", "original_index": 20, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_20"}, "type": "Document"} +{"page_content": " #[allow(clippy::single_match)]\n #[cfg(not(windows))]\n if !_tabbed {\n match config.window.startup_mode {\n #[cfg(target_os = \"macos\")]\n StartupMode::SimpleFullscreen => window.set_simple_fullscreen(true),\n StartupMode::Maximized if !is_wayland => window.set_maximized(true),\n _ => (),\n }\n }\n\n let hint_state = HintState::new(config.hints.alphabet());\n\n let mut damage_tracker = DamageTracker::new(size_info.screen_lines(), size_info.columns());\n damage_tracker.debug = config.debug.highlight_damage;\n\n // Disable vsync.\n if let Err(err) = surface.set_swap_interval(&context, SwapInterval::DontWait) {\n info!(\"Failed to disable vsync: {}\", err);\n }\n\n\n\nThis chunk sets up the display window, including configuring the startup mode, creating a hint state, and setting up a damage tracker. It also disables vsync for the display.", "metadata": {"chunk_id": "doc_70_chunk_21", "original_index": 21, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_21"}, "type": "Document"} +{"page_content": " Ok(Self {\n context: ManuallyDrop::new(Replaceable::new(context)),\n visual_bell: VisualBell::from(&config.bell),\n renderer: ManuallyDrop::new(renderer),\n surface: ManuallyDrop::new(surface),\n colors: List::from(&config.colors),\n frame_timer: FrameTimer::new(),\n raw_window_handle,\n damage_tracker,\n glyph_cache,\n hint_state,\n size_info,\n font_size,\n window,\n pending_renderer_update: Default::default(),\n vi_highlighted_hint: Default::default(),\n highlighted_hint: Default::default(),\n hint_mouse_point: Default::default(),\n pending_update: Default::default(),\n cursor_hidden: Default::default(),\n meter: Default::default(),\n ime: Default::default(),\n })\n }\n\n\n\nThis chunk is the constructor for the `Display` struct, which is responsible for managing the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_22", "original_index": 22, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_22"}, "type": "Document"} +{"page_content": " #[inline]\n pub fn gl_context(&self) -> &PossiblyCurrentContext {\n self.context.get()\n }\n\n pub fn make_not_current(&mut self) {\n if self.context.get().is_current() {\n self.context.replace_with(|context| {\n context\n .make_not_current()\n .expect(\"failed to disable context\")\n .treat_as_possibly_current()\n });\n }\n }\n\n pub fn make_current(&self) {\n if !self.context.get().is_current() {\n self.context.make_current(&self.surface).expect(\"failed to make context current\")\n }\n }\n\n\n\nThis chunk contains methods related to managing the OpenGL context for the display subsystem, including getting the current context, making the context current, and making the context not current.", "metadata": {"chunk_id": "doc_70_chunk_23", "original_index": 23, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_23"}, "type": "Document"} +{"page_content": " fn swap_buffers(&self) {\n #[allow(clippy::single_match)]\n let res = match (self.surface.deref(), &self.context.get()) {\n #[cfg(not(any(target_os = \"macos\", windows)))]\n (Surface::Egl(surface), 
PossiblyCurrentContext::Egl(context))\n if matches!(self.raw_window_handle, RawWindowHandle::Wayland(_))\n && !self.damage_tracker.debug =>\n {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n surface.swap_buffers_with_damage(context, &damage)\n },\n (surface, context) => surface.swap_buffers(context),\n };\n if let Err(err) = res {\n debug!(\"error calling swap_buffers: {}\", err);\n }\n }\n\n\n\nThe `swap_buffers` function is responsible for swapping the front and back buffers of the display, which is a critical step in the rendering process. It is part of the `Display` struct, which is the main component responsible for managing the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_24", "original_index": 24, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_24"}, "type": "Document"} +{"page_content": " /// Update font size and cell dimensions.\n ///\n /// This will return a tuple of the cell width and height.\n fn update_font_size(\n glyph_cache: &mut GlyphCache,\n config: &UiConfig,\n font: &Font,\n ) -> (f32, f32) {\n let _ = glyph_cache.update_font_size(font);\n\n // Compute new cell sizes.\n compute_cell_size(config, &glyph_cache.font_metrics())\n }\n\n /// Reset glyph cache.\n fn reset_glyph_cache(&mut self) {\n let cache = &mut self.glyph_cache;\n self.renderer.with_loader(|mut api| {\n cache.reset_glyph_cache(&mut api);\n });\n }\n\n\n\nThis chunk is part of the `Display` module, which is responsible for the display subsystem including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_25", "original_index": 25, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_25"}, "type": "Document"} +{"page_content": " // XXX: this function must not call to any `OpenGL` related tasks. 
Renderer updates are\n    // performed in [`Self::process_renderer_update`] right before drawing.\n    //\n    /// Process update events.\n    pub fn handle_update<T>(\n        &mut self,\n        terminal: &mut Term<T>,\n        pty_resize_handle: &mut dyn OnResize,\n        message_buffer: &MessageBuffer,\n        search_state: &mut SearchState,\n        config: &UiConfig,\n    ) where\n        T: EventListener,\n    {\n        let pending_update = mem::take(&mut self.pending_update);\n\n\n\nThe chunk is part of the `Display` struct implementation, which is responsible for managing the display subsystem including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_26", "original_index": 26, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_26"}, "type": "Document"} +{"page_content": "        let (mut cell_width, mut cell_height) =\n            (self.size_info.cell_width(), self.size_info.cell_height());\n\n        if pending_update.font().is_some() || pending_update.cursor_dirty() {\n            let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n            renderer_update.clear_font_cache = true\n        }\n\n        // Update font size and cell dimensions.\n        if let Some(font) = pending_update.font() {\n            let cell_dimensions = Self::update_font_size(&mut self.glyph_cache, config, font);\n            cell_width = cell_dimensions.0;\n            cell_height = cell_dimensions.1;\n\n\n\nThis chunk is part of the `handle_update` function in the `Display` struct, which is responsible for processing update events and updating the display accordingly.", "metadata": {"chunk_id": "doc_70_chunk_27", "original_index": 27, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_27"}, "type": "Document"} +{"page_content": "            info!(\"Cell size: {} x {}\", cell_width, cell_height);\n\n            // Mark entire terminal as damaged since glyph size could change without cell size\n            // changes.\n            self.damage_tracker.frame().mark_fully_damaged();\n        }\n\n        let (mut width, mut height) = (self.size_info.width(), self.size_info.height());\n        if let Some(dimensions) = pending_update.dimensions() {\n            width = dimensions.width as f32;\n            height = dimensions.height as f32;\n        }\n\n\n\nThis chunk is part of the `handle_update` function in the `Display` struct, which is responsible for processing update events and updating the display accordingly.", "metadata": {"chunk_id": "doc_70_chunk_28", "original_index": 28, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_28"}, "type": "Document"} +{"page_content": "        let padding = config.window.padding(self.window.scale_factor as f32);\n\n        let mut new_size = SizeInfo::new(\n            width,\n            height,\n            cell_width,\n            cell_height,\n            padding.0,\n            padding.1,\n            config.window.dynamic_padding,\n        );\n\n        // Update number of column/lines in the viewport.\n        let search_active = search_state.history_index.is_some();\n        let message_bar_lines = message_buffer.message().map_or(0, |m| m.text(&new_size).len());\n        let search_lines = usize::from(search_active);\n        new_size.reserve_lines(message_bar_lines + search_lines);\n\n\n\nThis chunk is part of the `handle_update` function in the `Display` struct, which is responsible for processing update events and updating the terminal's size and dimensions.", "metadata": {"chunk_id": "doc_70_chunk_29", "original_index": 29, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_29"}, "type": "Document"} +{"page_content": "        // Update resize increments.\n        if config.window.resize_increments {\n            self.window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n        }\n\n        // 
Resize when terminal when its dimensions have changed.\n        if self.size_info.screen_lines() != new_size.screen_lines\n            || self.size_info.columns() != new_size.columns()\n        {\n            // Resize PTY.\n            pty_resize_handle.on_resize(new_size.into());\n\n\n\nThis chunk is part of the `handle_update` function in the `Display` struct, which is responsible for processing update events and resizing the terminal window accordingly.", "metadata": {"chunk_id": "doc_70_chunk_30", "original_index": 30, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_30"}, "type": "Document"} +{"page_content": "            // Resize terminal.\n            terminal.resize(new_size);\n\n            // Resize damage tracking.\n            self.damage_tracker.resize(new_size.screen_lines(), new_size.columns());\n        }\n\n        // Check if dimensions have changed.\n        if new_size != self.size_info {\n            // Queue renderer update.\n            let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n            renderer_update.resize = true;\n\n            // Clear focused search match.\n            search_state.clear_focused_match();\n        }\n        self.size_info = new_size;\n    }\n\n\n\nThis chunk is part of the `handle_update` function in the `Display` struct, which is responsible for processing update events and resizing the terminal and renderer accordingly.", "metadata": {"chunk_id": "doc_70_chunk_31", "original_index": 31, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_31"}, "type": "Document"} +{"page_content": "    // NOTE: Renderer updates are split off, since platforms like Wayland require resize and other\n    // OpenGL operations to be performed right before rendering. Otherwise they could lock the\n    // back buffer and render with the previous state. This also solves flickering during resizes.\n    //\n    /// Update the state of the renderer.\n    pub fn process_renderer_update(&mut self) {\n        let renderer_update = match self.pending_renderer_update.take() {\n            Some(renderer_update) => renderer_update,\n            _ => return,\n        };\n\n\n\nThe chunk is part of the `Display` struct's implementation, which is responsible for managing the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_32", "original_index": 32, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_32"}, "type": "Document"} +{"page_content": "        // Resize renderer.\n        if renderer_update.resize {\n            let width = NonZeroU32::new(self.size_info.width() as u32).unwrap();\n            let height = NonZeroU32::new(self.size_info.height() as u32).unwrap();\n            self.surface.resize(&self.context, width, height);\n        }\n\n        // Ensure we're modifying the correct OpenGL context.\n        self.make_current();\n\n        if renderer_update.clear_font_cache {\n            self.reset_glyph_cache();\n        }\n\n        self.renderer.resize(&self.size_info);\n\n        info!(\"Padding: {} x {}\", self.size_info.padding_x(), self.size_info.padding_y());\n        info!(\"Width: {}, Height: {}\", self.size_info.width(), self.size_info.height());\n    }\n\n\n\nThis chunk is part of the `process_renderer_update` method of the `Display` struct, which is responsible for updating the state of the renderer before drawing the screen.", "metadata": {"chunk_id": "doc_70_chunk_33", "original_index": 33, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_33"}, "type": "Document"} +{"page_content": "    /// Draw the screen.\n    ///\n    /// A reference to Term whose state is being drawn must be provided.\n    ///\n    /// This call may block if vsync is enabled.\n    pub fn draw<T: EventListener>(\n        &mut self,\n        mut terminal: MutexGuard<'_, 
Term<T>>,\n        scheduler: &mut Scheduler,\n        message_buffer: &MessageBuffer,\n        config: &UiConfig,\n        search_state: &mut SearchState,\n    ) {\n        // Collect renderable content before the terminal is dropped.\n        let mut content = RenderableContent::new(config, self, &terminal, search_state);\n\n\nThe chunk is part of the `draw` method of the `Display` struct, which is responsible for rendering the terminal content to the screen.", "metadata": {"chunk_id": "doc_70_chunk_34", "original_index": 34, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_34"}, "type": "Document"} +{"page_content": "        let mut grid_cells = Vec::new();\n        for cell in &mut content {\n            grid_cells.push(cell);\n        }\n        let selection_range = content.selection_range();\n        let foreground_color = content.color(NamedColor::Foreground as usize);\n        let background_color = content.color(NamedColor::Background as usize);\n        let display_offset = content.display_offset();\n        let cursor = content.cursor();\n\n        let cursor_point = terminal.grid().cursor.point;\n        let total_lines = terminal.grid().total_lines();\n        let metrics = self.glyph_cache.font_metrics();\n        let size_info = self.size_info;\n\n        let vi_mode = terminal.mode().contains(TermMode::VI);\n        let vi_cursor_point = if vi_mode { Some(terminal.vi_mode_cursor.point) } else { None };\n\n\n\nThis chunk is part of the `draw` method of the `Display` struct, which is responsible for rendering the terminal content, cursor, and other UI elements to the screen.", "metadata": {"chunk_id": "doc_70_chunk_35", "original_index": 35, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_35"}, "type": "Document"} +{"page_content": "        // Add damage from the terminal.\n        if self.collect_damage() {\n            match terminal.damage() {\n                TermDamage::Full => self.damage_tracker.frame().mark_fully_damaged(),\n                TermDamage::Partial(damaged_lines) => {\n                    for damage in damaged_lines {\n                        self.damage_tracker.frame().damage_line(damage);\n                    }\n                },\n            }\n            terminal.reset_damage();\n        }\n\n        // Drop terminal as early as possible to free lock.\n        drop(terminal);\n\n        // Add damage from alacritty's UI elements overlapping terminal.\n        if self.collect_damage() {\n            let requires_full_damage = self.visual_bell.intensity() != 0.\n                || self.hint_state.active()\n                || search_state.regex().is_some();\n\n\n\nThe chunk is part of the `draw` method of the `Display` struct, which is responsible for rendering the terminal and its associated UI elements.", "metadata": {"chunk_id": "doc_70_chunk_36", "original_index": 36, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_36"}, "type": "Document"} +{"page_content": "            if requires_full_damage {\n                self.damage_tracker.frame().mark_fully_damaged();\n                self.damage_tracker.next_frame().mark_fully_damaged();\n            }\n\n            let vi_cursor_viewport_point =\n                vi_cursor_point.and_then(|cursor| point_to_viewport(display_offset, cursor));\n\n            self.damage_tracker.damage_vi_cursor(vi_cursor_viewport_point);\n            self.damage_tracker.damage_selection(selection_range, display_offset);\n        }\n\n        // Make sure this window's OpenGL context is active.\n        self.make_current();\n\n        self.renderer.clear(background_color, config.window_opacity());\n        let mut lines = RenderLines::new();\n\n        // Optimize loop hint comparator.\n        let has_highlighted_hint =\n            self.highlighted_hint.is_some() || self.vi_highlighted_hint.is_some();\n\n\n\nThis chunk is part of the `draw` method of the `Display` struct, which is responsible for rendering the terminal content and other UI elements on the screen.", "metadata": 
{"chunk_id": "doc_70_chunk_37", "original_index": 37, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_37"}, "type": "Document"} +{"page_content": " // Draw grid.\n {\n let _sampler = self.meter.sampler();\n\n // Ensure macOS hasn't reset our viewport.\n #[cfg(target_os = \"macos\")]\n self.renderer.set_viewport(&size_info);\n\n let glyph_cache = &mut self.glyph_cache;\n let highlighted_hint = &self.highlighted_hint;\n let vi_highlighted_hint = &self.vi_highlighted_hint;\n let damage_tracker = &mut self.damage_tracker;\n\n self.renderer.draw_cells(\n &size_info,\n glyph_cache,\n grid_cells.into_iter().map(|mut cell| {\n // Underline hints hovered by mouse or vi mode cursor.\n let point = term::viewport_to_point(display_offset, cell.point);\n\n\n\nThis chunk is part of the `draw` method of the `Display` struct, which is responsible for rendering the terminal content to the screen. The chunk specifically handles the drawing of the terminal grid, including the highlighting of hints that are hovered by the mouse or the vi mode cursor.", "metadata": {"chunk_id": "doc_70_chunk_38", "original_index": 38, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_38"}, "type": "Document"} +{"page_content": " if has_highlighted_hint {\n let hyperlink =\n cell.extra.as_ref().and_then(|extra| extra.hyperlink.as_ref());\n if highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n || vi_highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n {\n cell.flags.insert(Flags::UNDERLINE);\n // Damage hints for the current and next frames.\n damage_tracker.frame().damage_point(cell.point);\n damage_tracker.next_frame().damage_point(cell.point);\n }\n }\n\n\n\nThis chunk is responsible for highlighting hyperlinks in the terminal display. It checks if there are any highlighted hints, either from the mouse or the vi mode cursor, and then underlines the corresponding cells in the terminal grid. It also damages the highlighted hints for the current and next frames to ensure they are redrawn.", "metadata": {"chunk_id": "doc_70_chunk_39", "original_index": 39, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_39"}, "type": "Document"} +{"page_content": " // Update underline/strikeout.\n lines.update(&cell);\n\n cell\n }),\n );\n }\n\n let mut rects = lines.rects(&metrics, &size_info);\n\n if let Some(vi_cursor_point) = vi_cursor_point {\n // Indicate vi mode by showing the cursor's position in the top right corner.\n let line = (-vi_cursor_point.line.0 + size_info.bottommost_line().0) as usize;\n let obstructed_column = Some(vi_cursor_point)\n .filter(|point| point.line == -(display_offset as i32))\n .map(|point| point.column);\n self.draw_line_indicator(config, total_lines, obstructed_column, line);\n } else if search_state.regex().is_some() {\n // Show current display offset in vi-less search to indicate match position.\n self.draw_line_indicator(config, total_lines, None, display_offset);\n };\n\n\n\nThis chunk is responsible for drawing the terminal grid, cursor, and other UI elements on the display. 
It updates the underline and strikeout for each cell, collects the rectangles to be rendered, and draws indicators for the vi mode cursor and search position.", "metadata": {"chunk_id": "doc_70_chunk_40", "original_index": 40, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_40"}, "type": "Document"} +{"page_content": " // Draw cursor.\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n\n // Push visual bell after url/underline/strikeout rects.\n let visual_bell_intensity = self.visual_bell.intensity();\n if visual_bell_intensity != 0. {\n let visual_bell_rect = RenderRect::new(\n 0.,\n 0.,\n size_info.width(),\n size_info.height(),\n config.bell.color,\n visual_bell_intensity as f32,\n );\n rects.push(visual_bell_rect);\n }\n\n\n\nThis chunk is responsible for drawing the cursor and the visual bell effect in the display subsystem of the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_70_chunk_41", "original_index": 41, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_41"}, "type": "Document"} +{"page_content": " // Handle IME positioning and search bar rendering.\n let ime_position = match search_state.regex() {\n Some(regex) => {\n let search_label = match search_state.direction() {\n Direction::Right => FORWARD_SEARCH_LABEL,\n Direction::Left => BACKWARD_SEARCH_LABEL,\n };\n\n let search_text = Self::format_search(regex, search_label, size_info.columns());\n\n // Render the search bar.\n self.draw_search(config, &search_text);\n\n // Draw search bar cursor.\n let line = size_info.screen_lines();\n let column = Column(search_text.chars().count() - 1);\n\n\n\nThis chunk is responsible for handling the positioning and rendering of the search bar in the display subsystem of the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_70_chunk_42", "original_index": 42, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_42"}, "type": "Document"} +{"page_content": " // Add cursor to search bar if IME is not active.\n if self.ime.preedit().is_none() {\n let fg = config.colors.footer_bar_foreground();\n let shape = CursorShape::Underline;\n let cursor = RenderableCursor::new(Point::new(line, column), shape, fg, false);\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n }\n\n Some(Point::new(line, column))\n },\n None => {\n let num_lines = self.size_info.screen_lines();\n term::point_to_viewport(display_offset, cursor_point)\n .filter(|point| point.line < num_lines)\n },\n };\n\n\n\nThis chunk is part of the `draw` method of the `Display` struct, which is responsible for rendering the terminal content and UI elements on the screen.", "metadata": {"chunk_id": "doc_70_chunk_43", "original_index": 43, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_43"}, "type": "Document"} +{"page_content": " // Handle IME.\n if self.ime.is_enabled() {\n if let Some(point) = ime_position {\n let (fg, bg) = if search_state.regex().is_some() {\n (config.colors.footer_bar_foreground(), config.colors.footer_bar_background())\n } else {\n (foreground_color, background_color)\n };\n\n self.draw_ime_preview(point, fg, bg, &mut rects, config);\n }\n }\n\n if let Some(message) = message_buffer.message() {\n let search_offset = usize::from(search_state.regex().is_some());\n let text = message.text(&size_info);\n\n\n\nThe chunk is part of the `draw` method of the `Display` struct, which is responsible for rendering the terminal content, cursor, and other UI elements on the 
screen.", "metadata": {"chunk_id": "doc_70_chunk_44", "original_index": 44, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_44"}, "type": "Document"} +{"page_content": " // Create a new rectangle for the background.\n let start_line = size_info.screen_lines() + search_offset;\n let y = size_info.cell_height().mul_add(start_line as f32, size_info.padding_y());\n\n let bg = match message.ty() {\n MessageType::Error => config.colors.normal.red,\n MessageType::Warning => config.colors.normal.yellow,\n };\n\n\n\nThis chunk is part of the `draw` method of the `Display` struct, which is responsible for rendering the terminal content and UI elements on the screen. The specific code snippet is responsible for drawing the message bar at the bottom of the terminal window, with the background color determined by the message type.", "metadata": {"chunk_id": "doc_70_chunk_45", "original_index": 45, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_45"}, "type": "Document"} +{"page_content": " let x = 0;\n let width = size_info.width() as i32;\n let height = (size_info.height() - y) as i32;\n let message_bar_rect =\n RenderRect::new(x as f32, y, width as f32, height as f32, bg, 1.);\n\n // Push message_bar in the end, so it'll be above all other content.\n rects.push(message_bar_rect);\n\n // Always damage message bar, since it could have messages of the same size in it.\n self.damage_tracker.frame().add_viewport_rect(&size_info, x, y as i32, width, height);\n\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n\n\n\nThis chunk is responsible for drawing the message bar at the bottom of the terminal window, including the background rectangle and the message text. It also damages the message bar area to ensure it is redrawn correctly.", "metadata": {"chunk_id": "doc_70_chunk_46", "original_index": 46, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_46"}, "type": "Document"} +{"page_content": " // Relay messages to the user.\n let glyph_cache = &mut self.glyph_cache;\n let fg = config.colors.primary.background;\n for (i, message_text) in text.iter().enumerate() {\n let point = Point::new(start_line + i, Column(0));\n self.renderer.draw_string(\n point,\n fg,\n bg,\n message_text.chars(),\n &size_info,\n glyph_cache,\n );\n }\n } else {\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n }\n\n\n\nThis chunk is responsible for drawing the message bar at the bottom of the terminal window, which displays error or warning messages to the user. 
It is part of the `Display` struct's `draw` method, which is responsible for rendering the entire terminal display.", "metadata": {"chunk_id": "doc_70_chunk_47", "original_index": 47, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_47"}, "type": "Document"} +{"page_content": " self.draw_render_timer(config);\n\n // Draw hyperlink uri preview.\n if has_highlighted_hint {\n let cursor_point = vi_cursor_point.or(Some(cursor_point));\n self.draw_hyperlink_preview(config, cursor_point, display_offset);\n }\n\n // Notify winit that we're about to present.\n self.window.pre_present_notify();\n\n // Highlight damage for debugging.\n if self.damage_tracker.debug {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n let mut rects = Vec::with_capacity(damage.len());\n self.highlight_damage(&mut rects);\n self.renderer.draw_rects(&self.size_info, &metrics, rects);\n }\n\n // Clearing debug highlights from the previous frame requires full redraw.\n self.swap_buffers();\n\n\n\nThis chunk is part of the `draw` method of the `Display` struct, which is responsible for rendering the terminal content and various UI elements on the screen.", "metadata": {"chunk_id": "doc_70_chunk_48", "original_index": 48, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_48"}, "type": "Document"} +{"page_content": " if matches!(self.raw_window_handle, RawWindowHandle::Xcb(_) | RawWindowHandle::Xlib(_)) {\n // On X11 `swap_buffers` does not block for vsync. However the next OpenGl command\n // will block to synchronize (this is `glClear` in Alacritty), which causes a\n // permanent one frame delay.\n self.renderer.finish();\n }\n\n // XXX: Request the new frame after swapping buffers, so the\n // time to finish OpenGL operations is accounted for in the timeout.\n if !matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) {\n self.request_frame(scheduler);\n }\n\n self.damage_tracker.swap_damage();\n }\n\n /// Update to a new configuration.\n pub fn update_config(&mut self, config: &UiConfig) {\n self.damage_tracker.debug = config.debug.highlight_damage;\n self.visual_bell.update_config(&config.bell);\n self.colors = List::from(&config.colors);\n }\n\n\n\nThis chunk is part of the `Display` struct implementation, which is responsible for managing the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_49", "original_index": 49, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_49"}, "type": "Document"} +{"page_content": " /// Update the mouse/vi mode cursor hint highlighting.\n ///\n /// This will return whether the highlighted hints changed.\n pub fn update_highlighted_hints(\n &mut self,\n term: &Term,\n config: &UiConfig,\n mouse: &Mouse,\n modifiers: ModifiersState,\n ) -> bool {\n // Update vi mode cursor hint.\n let vi_highlighted_hint = if term.mode().contains(TermMode::VI) {\n let mods = ModifiersState::all();\n let point = term.vi_mode_cursor.point;\n hint::highlighted_at(term, config, point, mods)\n } else {\n None\n };\n let mut dirty = vi_highlighted_hint != self.vi_highlighted_hint;\n self.vi_highlighted_hint = vi_highlighted_hint;\n\n\n\nThe chunk is part of the `Display` struct's implementation, which is responsible for managing the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_50", "original_index": 50, "pid": 
"96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_50"}, "type": "Document"} +{"page_content": " // Abort if mouse highlighting conditions are not met.\n if !mouse.inside_text_area || !term.selection.as_ref().map_or(true, Selection::is_empty) {\n dirty |= self.highlighted_hint.is_some();\n self.highlighted_hint = None;\n return dirty;\n }\n\n // Find highlighted hint at mouse position.\n let point = mouse.point(&self.size_info, term.grid().display_offset());\n let highlighted_hint = hint::highlighted_at(term, config, point, modifiers);\n\n\n\nThis chunk is part of the `update_highlighted_hints` method of the `Display` struct, which is responsible for updating the highlighting of hints based on the mouse position and the terminal's state.", "metadata": {"chunk_id": "doc_70_chunk_51", "original_index": 51, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_51"}, "type": "Document"} +{"page_content": " // Update cursor shape.\n if highlighted_hint.is_some() {\n // If mouse changed the line, we should update the hyperlink preview, since the\n // highlighted hint could be disrupted by the old preview.\n dirty = self.hint_mouse_point.map_or(false, |p| p.line != point.line);\n self.hint_mouse_point = Some(point);\n self.window.set_mouse_cursor(CursorIcon::Pointer);\n } else if self.highlighted_hint.is_some() {\n self.hint_mouse_point = None;\n if term.mode().intersects(TermMode::MOUSE_MODE) && !term.mode().contains(TermMode::VI) {\n self.window.set_mouse_cursor(CursorIcon::Default);\n } else {\n self.window.set_mouse_cursor(CursorIcon::Text);\n }\n }\n\n\n\nThis chunk is part of the `update_highlighted_hints` function in the `Display` struct, which is responsible for updating the mouse/vi mode cursor hint highlighting in the terminal display.", "metadata": {"chunk_id": "doc_70_chunk_52", "original_index": 52, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_52"}, "type": "Document"} +{"page_content": " dirty |= self.highlighted_hint != highlighted_hint;\n self.highlighted_hint = highlighted_hint;\n\n dirty\n }\n\n #[inline(never)]\n fn draw_ime_preview(\n &mut self,\n point: Point,\n fg: Rgb,\n bg: Rgb,\n rects: &mut Vec,\n config: &UiConfig,\n ) {\n let preedit = match self.ime.preedit() {\n Some(preedit) => preedit,\n None => {\n // In case we don't have preedit, just set the popup point.\n self.window.update_ime_position(point, &self.size_info);\n return;\n },\n };\n\n let num_cols = self.size_info.columns();\n\n\n\nThe chunk is part of the `Display` struct's implementation, which is responsible for managing the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_53", "original_index": 53, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_53"}, "type": "Document"} +{"page_content": " // Get the visible preedit.\n let visible_text: String = match (preedit.cursor_byte_offset, preedit.cursor_end_offset) {\n (Some(byte_offset), Some(end_offset)) if end_offset > num_cols => StrShortener::new(\n &preedit.text[byte_offset..],\n num_cols,\n ShortenDirection::Right,\n Some(SHORTENER),\n ),\n _ => {\n StrShortener::new(&preedit.text, num_cols, ShortenDirection::Left, Some(SHORTENER))\n },\n }\n .collect();\n\n\n\nThis chunk is part of the `draw_ime_preview` function, which is responsible for drawing the IME (Input Method Editor) preview on the screen. 
The function is called when the IME is enabled and the user is typing text.", "metadata": {"chunk_id": "doc_70_chunk_54", "original_index": 54, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_54"}, "type": "Document"} +{"page_content": " let visible_len = visible_text.chars().count();\n\n let end = cmp::min(point.column.0 + visible_len, num_cols);\n let start = end.saturating_sub(visible_len);\n\n let start = Point::new(point.line, Column(start));\n let end = Point::new(point.line, Column(end - 1));\n\n let glyph_cache = &mut self.glyph_cache;\n let metrics = glyph_cache.font_metrics();\n\n self.renderer.draw_string(\n start,\n fg,\n bg,\n visible_text.chars(),\n &self.size_info,\n glyph_cache,\n );\n\n // Damage preedit inside the terminal viewport.\n if self.collect_damage() && point.line < self.size_info.screen_lines() {\n let damage = LineDamageBounds::new(start.line, 0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n\n\nThis chunk is responsible for drawing the IME (Input Method Editor) preview within the terminal display. It calculates the visible portion of the preedit text, positions it on the screen, and renders it using the renderer. It also tracks the damage to the preedit area to ensure it is redrawn correctly.", "metadata": {"chunk_id": "doc_70_chunk_55", "original_index": 55, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_55"}, "type": "Document"} +{"page_content": " // Add underline for preedit text.\n let underline = RenderLine { start, end, color: fg };\n rects.extend(underline.rects(Flags::UNDERLINE, &metrics, &self.size_info));\n\n let ime_popup_point = match preedit.cursor_end_offset {\n Some(cursor_end_offset) if cursor_end_offset != 0 => {\n let is_wide = preedit.text[preedit.cursor_byte_offset.unwrap_or_default()..]\n .chars()\n .next()\n .map(|ch| ch.width() == Some(2))\n .unwrap_or_default();\n\n\n\nThis chunk is part of the `draw_ime_preview` function, which is responsible for rendering the input method editor (IME) preview within the terminal display.", "metadata": {"chunk_id": "doc_70_chunk_56", "original_index": 56, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_56"}, "type": "Document"} +{"page_content": " let cursor_column = Column(\n (end.column.0 as isize - cursor_end_offset as isize + 1).max(0) as usize,\n );\n let cursor_point = Point::new(point.line, cursor_column);\n let cursor =\n RenderableCursor::new(cursor_point, CursorShape::HollowBlock, fg, is_wide);\n rects.extend(cursor.rects(&self.size_info, config.cursor.thickness()));\n cursor_point\n },\n _ => end,\n };\n\n\n\nThis chunk is part of the `draw_ime_preview` function, which is responsible for drawing the IME (Input Method Editor) preview on the screen.", "metadata": {"chunk_id": "doc_70_chunk_57", "original_index": 57, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_57"}, "type": "Document"} +{"page_content": " self.window.update_ime_position(ime_popup_point, &self.size_info);\n }\n\n /// Format search regex to account for the cursor and fullwidth characters.\n fn format_search(search_regex: &str, search_label: &str, max_width: usize) -> String {\n let label_len = search_label.len();\n\n // Skip `search_regex` formatting if only label is visible.\n if label_len > max_width {\n return search_label[..max_width].to_owned();\n }\n\n\n\nThe chunk is part of the `Display` struct implementation, which is responsible for 
managing the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_58", "original_index": 58, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_58"}, "type": "Document"} +{"page_content": " // The search string consists of `search_label` + `search_regex` + `cursor`.\n let mut bar_text = String::from(search_label);\n bar_text.extend(StrShortener::new(\n search_regex,\n max_width.wrapping_sub(label_len + 1),\n ShortenDirection::Left,\n Some(SHORTENER),\n ));\n\n // Add place for cursor.\n bar_text.push(' ');\n\n bar_text\n }\n\n\n\nThis chunk is part of the `Display` struct's `format_search` method, which is responsible for formatting the search regex to account for the cursor and fullwidth characters, ensuring it fits within the maximum width of the search bar.", "metadata": {"chunk_id": "doc_70_chunk_59", "original_index": 59, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_59"}, "type": "Document"} +{"page_content": " /// Draw preview for the currently highlighted `Hyperlink`.\n #[inline(never)]\n fn draw_hyperlink_preview(\n &mut self,\n config: &UiConfig,\n cursor_point: Option,\n display_offset: usize,\n ) {\n let num_cols = self.size_info.columns();\n let uris: Vec<_> = self\n .highlighted_hint\n .iter()\n .chain(&self.vi_highlighted_hint)\n .filter_map(|hint| hint.hyperlink().map(|hyperlink| hyperlink.uri()))\n .map(|uri| StrShortener::new(uri, num_cols, ShortenDirection::Right, Some(SHORTENER)))\n .collect();\n\n\n\nThe chunk is part of the `draw_hyperlink_preview` function, which is responsible for drawing a preview of the currently highlighted hyperlink in the terminal.", "metadata": {"chunk_id": "doc_70_chunk_60", "original_index": 60, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_60"}, "type": "Document"} +{"page_content": " if uris.is_empty() {\n return;\n }\n\n // The maximum amount of protected lines including the ones we'll show preview on.\n let max_protected_lines = uris.len() * 2;\n\n // Lines we shouldn't show preview on, because it'll obscure the highlighted hint.\n let mut protected_lines = Vec::with_capacity(max_protected_lines);\n if self.size_info.screen_lines() > max_protected_lines {\n // Prefer to show preview even when it'll likely obscure the highlighted hint, when\n // there's no place left for it.\n protected_lines.push(self.hint_mouse_point.map(|point| point.line));\n protected_lines.push(cursor_point.map(|point| point.line));\n }\n\n\n\nThis chunk is part of the `draw_hyperlink_preview` function, which is responsible for drawing a preview of the currently highlighted hyperlink in the terminal.", "metadata": {"chunk_id": "doc_70_chunk_61", "original_index": 61, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_61"}, "type": "Document"} +{"page_content": " // Find the line in viewport we can draw preview on without obscuring protected lines.\n let viewport_bottom = self.size_info.bottommost_line() - Line(display_offset as i32);\n let viewport_top = viewport_bottom - (self.size_info.screen_lines() - 1);\n let uri_lines = (viewport_top.0..=viewport_bottom.0)\n .rev()\n .map(|line| Some(Line(line)))\n .filter_map(|line| {\n if protected_lines.contains(&line) {\n None\n } else {\n protected_lines.push(line);\n line\n }\n })\n .take(uris.len())\n .flat_map(|line| term::point_to_viewport(display_offset, Point::new(line, Column(0))));\n\n\n\nThis chunk is part of the `draw_hyperlink_preview` 
function, which is responsible for drawing a preview of the currently highlighted hyperlink in the terminal.", "metadata": {"chunk_id": "doc_70_chunk_62", "original_index": 62, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_62"}, "type": "Document"} +{"page_content": " let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n for (uri, point) in uris.into_iter().zip(uri_lines) {\n // Damage the uri preview.\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n\n // Damage the uri preview for the next frame as well.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n self.renderer.draw_string(point, fg, bg, uri, &self.size_info, &mut self.glyph_cache);\n }\n }\n\n\n\nThis chunk is part of the `draw_hyperlink_preview` function, which is responsible for drawing a preview of the currently highlighted hyperlink in the terminal.", "metadata": {"chunk_id": "doc_70_chunk_63", "original_index": 63, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_63"}, "type": "Document"} +{"page_content": " /// Draw current search regex.\n #[inline(never)]\n fn draw_search(&mut self, config: &UiConfig, text: &str) {\n // Assure text length is at least num_cols.\n let num_cols = self.size_info.columns();\n let text = format!(\"{:<1$}\", text, num_cols);\n\n let point = Point::new(self.size_info.screen_lines(), Column(0));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n\n\n\nThe chunk is part of the `Display` struct's implementation, which is responsible for managing the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_64", "original_index": 64, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_64"}, "type": "Document"} +{"page_content": " self.renderer.draw_string(\n point,\n fg,\n bg,\n text.chars(),\n &self.size_info,\n &mut self.glyph_cache,\n );\n }\n\n /// Draw render timer.\n #[inline(never)]\n fn draw_render_timer(&mut self, config: &UiConfig) {\n if !config.debug.render_timer {\n return;\n }\n\n let timing = format!(\"{:.3} usec\", self.meter.average());\n let point = Point::new(self.size_info.screen_lines().saturating_sub(2), Column(0));\n let fg = config.colors.primary.background;\n let bg = config.colors.normal.red;\n\n\n\nThe chunk is part of the `draw_render_timer` function, which is responsible for drawing a render timer on the screen for debugging purposes.", "metadata": {"chunk_id": "doc_70_chunk_65", "original_index": 65, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_65"}, "type": "Document"} +{"page_content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, timing.len());\n self.damage_tracker.frame().damage_line(damage);\n // Damage the render timer for the next frame.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, timing.chars(), &self.size_info, glyph_cache);\n }\n\n\n\nThis chunk is part of the `draw_render_timer` function, which is responsible for drawing the render timer on the screen. 
It is used for debugging purposes to display the time taken to render the current frame.", "metadata": {"chunk_id": "doc_70_chunk_66", "original_index": 66, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_66"}, "type": "Document"} +{"page_content": " /// Draw an indicator for the position of a line in history.\n #[inline(never)]\n fn draw_line_indicator(\n &mut self,\n config: &UiConfig,\n total_lines: usize,\n obstructed_column: Option,\n line: usize,\n ) {\n let columns = self.size_info.columns();\n let text = format!(\"[{}/{}]\", line, total_lines - 1);\n let column = Column(self.size_info.columns().saturating_sub(text.len()));\n let point = Point::new(0, column);\n\n\n\nThe chunk is part of the `Display` struct's implementation, which is responsible for managing the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_67", "original_index": 67, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_67"}, "type": "Document"} +{"page_content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, columns - 1);\n self.damage_tracker.frame().damage_line(damage);\n // Damage it on the next frame in case it goes away.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let colors = &config.colors;\n let fg = colors.line_indicator.foreground.unwrap_or(colors.primary.background);\n let bg = colors.line_indicator.background.unwrap_or(colors.primary.foreground);\n\n\n\nThis chunk is part of the `draw_line_indicator` function, which is responsible for drawing an indicator for the position of a line in the terminal history. It is used to provide visual feedback to the user about the current display offset or the position of the vi mode cursor.", "metadata": {"chunk_id": "doc_70_chunk_68", "original_index": 68, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_68"}, "type": "Document"} +{"page_content": " // Do not render anything if it would obscure the vi mode cursor.\n if obstructed_column.map_or(true, |obstructed_column| obstructed_column < column) {\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, text.chars(), &self.size_info, glyph_cache);\n }\n }\n\n /// Returns `true` if damage information should be collected, `false` otherwise.\n #[inline]\n fn collect_damage(&self) -> bool {\n matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) || self.damage_tracker.debug\n }\n\n\n\nThis chunk is part of the `draw_line_indicator` function, which is responsible for drawing an indicator for the position of a line in the terminal history. It checks if rendering the indicator would obscure the vi mode cursor, and if not, it draws the indicator text. 
The `collect_damage` function is a helper function that determines whether damage information should be collected, based on the window type and whether debug mode is enabled.", "metadata": {"chunk_id": "doc_70_chunk_69", "original_index": 69, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_69"}, "type": "Document"} +{"page_content": " /// Highlight damaged rects.\n ///\n /// This function is for debug purposes only.\n fn highlight_damage(&self, render_rects: &mut Vec) {\n for damage_rect in &self.damage_tracker.shape_frame_damage(self.size_info.into()) {\n let x = damage_rect.x as f32;\n let height = damage_rect.height as f32;\n let width = damage_rect.width as f32;\n let y = damage_y_to_viewport_y(&self.size_info, damage_rect) as f32;\n let render_rect = RenderRect::new(x, y, width, height, DAMAGE_RECT_COLOR, 0.5);\n\n\n\nThe chunk is part of the `Display` struct implementation, which is responsible for managing the display subsystem, including window management, font rasterization, and GPU drawing.", "metadata": {"chunk_id": "doc_70_chunk_70", "original_index": 70, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_70"}, "type": "Document"} +{"page_content": " render_rects.push(render_rect);\n }\n }\n\n /// Request a new frame for a window on Wayland.\n fn request_frame(&mut self, scheduler: &mut Scheduler) {\n // Mark that we've used a frame.\n self.window.has_frame = false;\n\n // Get the display vblank interval.\n let monitor_vblank_interval = 1_000_000.\n / self\n .window\n .current_monitor()\n .and_then(|monitor| monitor.refresh_rate_millihertz())\n .unwrap_or(60_000) as f64;\n\n\n\nThe chunk is part of the `Display` struct's implementation, specifically the `highlight_damage` and `request_frame` methods. These methods are responsible for highlighting damaged rendering regions for debugging purposes and requesting a new frame for a window on Wayland, respectively.", "metadata": {"chunk_id": "doc_70_chunk_71", "original_index": 71, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_71"}, "type": "Document"} +{"page_content": " // Now convert it to micro seconds.\n let monitor_vblank_interval =\n Duration::from_micros((1000. * monitor_vblank_interval) as u64);\n\n let swap_timeout = self.frame_timer.compute_timeout(monitor_vblank_interval);\n\n let window_id = self.window.id();\n let timer_id = TimerId::new(Topic::Frame, window_id);\n let event = Event::new(EventType::Frame, window_id);\n\n scheduler.schedule(event, swap_timeout, false, timer_id);\n }\n}\n\n\n\nThis chunk is part of the `Display` struct's `request_frame` method, which is responsible for requesting a new frame for a window on Wayland. 
It calculates the timeout for the next frame based on the display's refresh rate and schedules a `Frame` event with the scheduler.", "metadata": {"chunk_id": "doc_70_chunk_72", "original_index": 72, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_72"}, "type": "Document"} +{"page_content": "impl Drop for Display {\n fn drop(&mut self) {\n // Switch OpenGL context before dropping, otherwise objects (like programs) from other\n // contexts might be deleted when dropping renderer.\n self.make_current();\n unsafe {\n ManuallyDrop::drop(&mut self.renderer);\n ManuallyDrop::drop(&mut self.context);\n ManuallyDrop::drop(&mut self.surface);\n }\n }\n}\n\n\n\nThe chunk is the implementation of the `Drop` trait for the `Display` struct, which is responsible for managing the window, font rasterization, and GPU drawing in the display subsystem.", "metadata": {"chunk_id": "doc_70_chunk_73", "original_index": 73, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_73"}, "type": "Document"} +{"page_content": "/// Input method state.\n#[derive(Debug, Default)]\npub struct Ime {\n /// Whether the IME is enabled.\n enabled: bool,\n\n /// Current IME preedit.\n preedit: Option,\n}\n\nimpl Ime {\n #[inline]\n pub fn set_enabled(&mut self, is_enabled: bool) {\n if is_enabled {\n self.enabled = is_enabled\n } else {\n // Clear state when disabling IME.\n *self = Default::default();\n }\n }\n\n #[inline]\n pub fn is_enabled(&self) -> bool {\n self.enabled\n }\n\n #[inline]\n pub fn set_preedit(&mut self, preedit: Option) {\n self.preedit = preedit;\n }\n\n #[inline]\n pub fn preedit(&self) -> Option<&Preedit> {\n self.preedit.as_ref()\n }\n}\n\n#[derive(Debug, Default, PartialEq, Eq)]\npub struct Preedit {\n /// The preedit text.\n text: String,\n\n\n\nThe chunk describes the input method state and preedit structure used in the display subsystem of the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_70_chunk_74", "original_index": 74, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_74"}, "type": "Document"} +{"page_content": " /// Byte offset for cursor start into the preedit text.\n ///\n /// `None` means that the cursor is invisible.\n cursor_byte_offset: Option,\n\n /// The cursor offset from the end of the preedit in char width.\n cursor_end_offset: Option,\n}\n\nimpl Preedit {\n pub fn new(text: String, cursor_byte_offset: Option) -> Self {\n let cursor_end_offset = if let Some(byte_offset) = cursor_byte_offset {\n // Convert byte offset into char offset.\n let cursor_end_offset =\n text[byte_offset..].chars().fold(0, |acc, ch| acc + ch.width().unwrap_or(1));\n\n\n\nThe chunk describes the `Preedit` struct, which is part of the `Ime` (Input Method Editor) state in the display subsystem of the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_70_chunk_75", "original_index": 75, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_75"}, "type": "Document"} +{"page_content": " Some(cursor_end_offset)\n } else {\n None\n };\n\n Self { text, cursor_byte_offset, cursor_end_offset }\n }\n}\n\n/// Pending renderer updates.\n///\n/// All renderer updates are cached to be applied just before rendering, to avoid platform-specific\n/// rendering issues.\n#[derive(Debug, Default, Copy, Clone)]\npub struct RendererUpdate {\n /// Should resize the window.\n resize: bool,\n\n /// Clear font caches.\n clear_font_cache: bool,\n}\n\n/// Struct for safe in-place replacement.\n///\n/// This struct allows easily 
replacing struct fields that provide `self -> Self` methods in-place,\n/// without having to deal with constantly unwrapping the underlying [`Option`].\nstruct Replaceable(Option);\n\n\n\nThe chunk is part of the `Preedit` struct definition, which is used to represent the current state of an input method editor (IME) in the `Display` module of the Alacritty terminal emulator.", "metadata": {"chunk_id": "doc_70_chunk_76", "original_index": 76, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_76"}, "type": "Document"} +{"page_content": "impl Replaceable {\n pub fn new(inner: T) -> Self {\n Self(Some(inner))\n }\n\n /// Replace the contents of the container.\n pub fn replace_with T>(&mut self, f: F) {\n self.0 = self.0.take().map(f);\n }\n\n /// Get immutable access to the wrapped value.\n pub fn get(&self) -> &T {\n self.0.as_ref().unwrap()\n }\n\n /// Get mutable access to the wrapped value.\n pub fn get_mut(&mut self) -> &mut T {\n self.0.as_mut().unwrap()\n }\n}\n\n\n\nThe chunk defines a struct called `Replaceable` that provides a safe way to replace the contents of a container in-place, without having to deal with constantly unwrapping an `Option`.", "metadata": {"chunk_id": "doc_70_chunk_77", "original_index": 77, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_77"}, "type": "Document"} +{"page_content": "impl Deref for Replaceable {\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n self.get()\n }\n}\n\nimpl DerefMut for Replaceable {\n fn deref_mut(&mut self) -> &mut Self::Target {\n self.get_mut()\n }\n}\n\n/// The frame timer state.\npub struct FrameTimer {\n /// Base timestamp used to compute sync points.\n base: Instant,\n\n /// The last timestamp we synced to.\n last_synced_timestamp: Instant,\n\n /// The refresh rate we've used to compute sync timestamps.\n refresh_interval: Duration,\n}\n\nimpl FrameTimer {\n pub fn new() -> Self {\n let now = Instant::now();\n Self { base: now, last_synced_timestamp: now, refresh_interval: Duration::ZERO }\n }\n\n\n\nThe chunk is part of the `Replaceable` struct implementation and the `FrameTimer` struct implementation within the `Display` module of the Alacritty terminal emulator codebase.", "metadata": {"chunk_id": "doc_70_chunk_78", "original_index": 78, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_78"}, "type": "Document"} +{"page_content": " /// Compute the delay that we should use to achieve the target frame\n /// rate.\n pub fn compute_timeout(&mut self, refresh_interval: Duration) -> Duration {\n let now = Instant::now();\n\n // Handle refresh rate change.\n if self.refresh_interval != refresh_interval {\n self.base = now;\n self.last_synced_timestamp = now;\n self.refresh_interval = refresh_interval;\n return refresh_interval;\n }\n\n let next_frame = self.last_synced_timestamp + self.refresh_interval;\n\n\n\nThe chunk is part of the `FrameTimer` struct, which is responsible for computing the delay that should be used to achieve the target frame rate for the display.", "metadata": {"chunk_id": "doc_70_chunk_79", "original_index": 79, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_79"}, "type": "Document"} +{"page_content": " if next_frame < now {\n // Redraw immediately if we haven't drawn in over `refresh_interval` microseconds.\n let elapsed_micros = (now - self.base).as_micros() as u64;\n let refresh_micros = self.refresh_interval.as_micros() as u64;\n self.last_synced_timestamp =\n now - 
Duration::from_micros(elapsed_micros % refresh_micros);\n Duration::ZERO\n } else {\n // Redraw on the next `refresh_interval` clock tick.\n self.last_synced_timestamp = next_frame;\n next_frame - now\n }\n }\n}\n\n\n\nThis chunk is part of the `FrameTimer` struct, which is responsible for computing the delay that should be used to achieve the target frame rate for the display.", "metadata": {"chunk_id": "doc_70_chunk_80", "original_index": 80, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_80"}, "type": "Document"} +{"page_content": "/// Calculate the cell dimensions based on font metrics.\n///\n/// This will return a tuple of the cell width and height.\n#[inline]\nfn compute_cell_size(config: &UiConfig, metrics: &crossfont::Metrics) -> (f32, f32) {\n let offset_x = f64::from(config.font.offset.x);\n let offset_y = f64::from(config.font.offset.y);\n (\n (metrics.average_advance + offset_x).floor().max(1.) as f32,\n (metrics.line_height + offset_y).floor().max(1.) as f32,\n )\n}\n\n\n\nThis chunk calculates the cell dimensions based on font metrics, which is used to determine the size of the terminal window in the display subsystem.", "metadata": {"chunk_id": "doc_70_chunk_81", "original_index": 81, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_81"}, "type": "Document"} +{"page_content": "/// Calculate the size of the window given padding, terminal dimensions and cell size.\nfn window_size(\n config: &UiConfig,\n dimensions: Dimensions,\n cell_width: f32,\n cell_height: f32,\n scale_factor: f32,\n) -> PhysicalSize {\n let padding = config.window.padding(scale_factor);\n\n let grid_width = cell_width * dimensions.columns.max(MIN_COLUMNS) as f32;\n let grid_height = cell_height * dimensions.lines.max(MIN_SCREEN_LINES) as f32;\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n PhysicalSize::new(width as u32, height as u32)\n}\n\n\nThe chunk calculates the size of the window based on the terminal dimensions, cell size, and padding specified in the configuration.", "metadata": {"chunk_id": "doc_70_chunk_82", "original_index": 82, "pid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_82"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n\nThe provided chunk is the Apache Software License 2.0 header that is commonly used in the source code of Apache projects.", "metadata": {"chunk_id": "doc_71_chunk_0", "original_index": 0, "pid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_0"}, "type": "Document"} +{"page_content": "package org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol extends WithParams {\n Param WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n\n\nThis chunk defines an interface called `HasWeightCol` that provides a parameter for specifying the weight column in a dataset. It is part of the `org.apache.flink.ml.common.param` package, which likely contains common parameters and interfaces used across various machine learning components in the Apache Flink project.", "metadata": {"chunk_id": "doc_71_chunk_1", "original_index": 1, "pid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_1"}, "type": "Document"} +{"page_content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n\n\nThis chunk contains the Apache Software Foundation license information for the code in the document.", "metadata": {"chunk_id": "doc_72_chunk_0", "original_index": 0, "pid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_0"}, "type": "Document"} +{"page_content": "from typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n\n\nThis chunk defines the `_VectorSlicerParams` class, which is a part of the `VectorSlicer` transformer in the PyFlink ML library. The `_VectorSlicerParams` class handles the parameters for the `VectorSlicer` transformer, including the validation of the `indices` parameter.", "metadata": {"chunk_id": "doc_72_chunk_1", "original_index": 1, "pid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_1"}, "type": "Document"} +{"page_content": " \"\"\"\n Params for :class:`VectorSlicer`.\n \"\"\"\n\n INDICES: Param[Tuple[int, ...]] = IntArrayParam(\n \"indices\",\n \"An array of indices to select features from a vector column.\",\n None,\n indices_validator(None))\n\n def __init__(self, java_params):\n super(_VectorSlicerParams, self).__init__(java_params)\n\n def set_indices(self, *ind: int):\n return self.set(self.INDICES, ind)\n\n def get_indices(self) -> Tuple[int, ...]:\n return self.get(self.INDICES)\n\n @property\n def indices(self) -> Tuple[int, ...]:\n return self.get_indices()\n\n\n\nThe chunk defines the parameters for the `VectorSlicer` class, which is a feature transformer that selects a subset of features from a vector column.", "metadata": {"chunk_id": "doc_72_chunk_2", "original_index": 2, "pid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_2"}, "type": "Document"} +{"page_content": "\nclass VectorSlicer(JavaFeatureTransformer, _VectorSlicerParams):\n \"\"\"\n A Transformer that transforms a vector to a new feature, which is a sub-array of\n the original feature.It is useful for extracting features from a given vector.\n\n Note that duplicate features are not allowed, so there can be no overlap between\n selected indices. 
If the max value of the indices is greater than the size of\n the input vector, it throws an IllegalArgumentException.\n \"\"\"\n\n def __init__(self, java_model=None):\n super(VectorSlicer, self).__init__(java_model)\n\n @classmethod\n def _java_transformer_package_name(cls) -> str:\n return \"vectorslicer\"\n\n @classmethod\n def _java_transformer_class_name(cls) -> str:\n return \"VectorSlicer\"\n\n\nThis chunk defines a Python class called `VectorSlicer` that is a feature transformer for Apache Flink's machine learning library. It is used to extract a subset of features from a vector input.", "metadata": {"chunk_id": "doc_72_chunk_3", "original_index": 3, "pid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_3"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n\nThe provided chunk is the Apache Software Foundation license header that is commonly found at the beginning of source code files.", "metadata": {"chunk_id": "doc_73_chunk_0", "original_index": 0, "pid": "883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd_0"}, "type": "Document"} +{"page_content": "package org.apache.flink.iteration.operator.allround;\n\nimport org.apache.flink.annotation.Internal;\n\nimport java.util.function.Supplier;\n\n/**\n * Operators or UDF implements this method would be provided with an supplier that provides the\n * current rounds of the current element.\n */\n@Internal\npublic interface EpochAware {\n\n void setEpochSupplier(Supplier epochSupplier);\n}\n\n\nThis chunk defines an interface called `EpochAware` that is part of the `org.apache.flink.iteration.operator.allround` package. The interface provides a method `setEpochSupplier` that allows operators or user-defined functions to be provided with a supplier that provides the current round of the current element.", "metadata": {"chunk_id": "doc_73_chunk_1", "original_index": 1, "pid": "883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd_1"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. 
See the License for the\n * specific language governing permissions and limitations\n * under the License.\n */\n\n\n\nThe provided chunk is the Apache Software Foundation license header that is commonly used at the beginning of source code files.", "metadata": {"chunk_id": "doc_74_chunk_0", "original_index": 0, "pid": "8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5_0"}, "type": "Document"} +{"page_content": "package org.apache.flink.ml.linalg;\n\nimport org.apache.flink.api.common.typeinfo.TypeInfo;\nimport org.apache.flink.ml.linalg.typeinfo.VectorWithNormTypeInfoFactory;\n\n/** A vector with its norm. */\n@TypeInfo(VectorWithNormTypeInfoFactory.class)\npublic class VectorWithNorm {\n public final Vector vector;\n\n public final double l2Norm;\n\n public VectorWithNorm(Vector vector) {\n this(vector, BLAS.norm2(vector));\n }\n\n public VectorWithNorm(Vector vector, double l2Norm) {\n this.vector = vector;\n this.l2Norm = l2Norm;\n }\n}\n\n\nThis chunk defines a class called `VectorWithNorm` that represents a vector along with its L2 norm. The class is part of the `org.apache.flink.ml.linalg` package and is annotated with `@TypeInfo(VectorWithNormTypeInfoFactory.class)` to provide type information for the class.", "metadata": {"chunk_id": "doc_74_chunk_1", "original_index": 1, "pid": "8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5_1"}, "type": "Document"} +{"page_content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\nfrom typing import List\n\n\n\nThis chunk contains the Apache Software Foundation license information for the Python Flink Machine Learning (PyFlink ML) library.", "metadata": {"chunk_id": "doc_75_chunk_0", "original_index": 0, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_0"}, "type": "Document"} +{"page_content": "from pyflink.common import Types\nfrom pyflink.ml.tests.test_utils import PyFlinkMLTestCase, update_existing_params\n\nfrom pyflink.ml.linalg import DenseVectorTypeInfo, Vectors\n\nfrom pyflink.ml.feature.univariatefeatureselector import UnivariateFeatureSelector, \\\n UnivariateFeatureSelectorModel\nfrom pyflink.table import Table\n\n\nclass UnivariateFeatureSelectorTest(PyFlinkMLTestCase):\n\n def setUp(self):\n super(UnivariateFeatureSelectorTest, self).setUp()\n self.input_table = self.t_env.from_data_stream(\n self.env.from_collection([\n (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,\n 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)),\n (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,\n 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)),\n\n\nThis chunk is part of a PyFlink ML test suite for the UnivariateFeatureSelector and UnivariateFeatureSelectorModel classes. It sets up the input data table and the test case class.", "metadata": {"chunk_id": "doc_75_chunk_1", "original_index": 1, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_1"}, "type": "Document"} +{"page_content": " (1, Vectors.dense(3.85692159e-01, -9.04639637e-01, 5.09782604e-02,\n 8.40043971e-01, 7.45977857e-01, 8.78402288e-01)),\n (1, Vectors.dense(1.36264353e+00, 2.62454094e-01, 7.96306202e-01,\n 6.14948000e-01, 7.44948187e-01, 9.74034830e-01)),\n (1, Vectors.dense(9.65874070e-01, 2.52773665e+00, -2.19380094e+00,\n 2.33408080e-01, 1.86340919e-01, 8.23390433e-01)),\n\n\nThe chunk represents sample data used for testing the UnivariateFeatureSelector class in the PyFlink ML library.", "metadata": {"chunk_id": "doc_75_chunk_2", "original_index": 2, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_2"}, "type": "Document"} +{"page_content": " (2, Vectors.dense(1.12324305e+01, -2.77121515e-01, 1.12740513e-01,\n 2.35184013e-01, 3.46668895e-01, 9.38500782e-02)),\n (2, Vectors.dense(1.06195839e+01, -1.82891238e+00, 2.25085601e-01,\n 9.09979851e-01, 6.80257535e-02, 8.24017480e-01)),\n (2, Vectors.dense(1.12806837e+01, 1.30686889e+00, 9.32839108e-02,\n 3.49784755e-01, 1.71322408e-02, 7.48465194e-02)),\n (2, Vectors.dense(9.98689462e+00, 9.50808938e-01, -2.90786359e-01,\n 2.31253009e-01, 7.46270968e-01, 1.60308169e-01)),\n\n\nThe chunk represents a portion of the input data used in the UnivariateFeatureSelectorTest class to test the UnivariateFeatureSelector and UnivariateFeatureSelectorModel classes in the pyflink.ml.feature.univariatefeatureselector module.", "metadata": {"chunk_id": "doc_75_chunk_3", "original_index": 3, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_3"}, "type": "Document"} 
+{"page_content": " (2, Vectors.dense(1.08428551e+01, -1.02749936e+00, 1.73951508e-01,\n 8.92482744e-02, 1.42651730e-01, 7.66751625e-01)),\n (3, Vectors.dense(-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,\n 8.22809049e-01, 3.26739456e-01, 7.88268404e-01)),\n (3, Vectors.dense(-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,\n 7.33931213e-01, 1.42554396e-01, 7.11225605e-01)),\n (3, Vectors.dense(-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,\n 2.51532056e-01, 2.82729807e-01, 7.16245686e-01)),\n\n\nThe chunk appears to be a part of the input data used for testing the UnivariateFeatureSelector class in the PyFlink ML library. The data consists of rows with a label and a feature vector, which are used to test the feature selection functionality of the UnivariateFeatureSelector.", "metadata": {"chunk_id": "doc_75_chunk_4", "original_index": 4, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_4"}, "type": "Document"} +{"page_content": " (3, Vectors.dense(-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,\n 5.93898886e-01, 5.68425656e-01, 8.49762330e-01)),\n (3, Vectors.dense(7.63485129e-01, 1.02605138e+01, 1.32617719e+00,\n 5.49682879e-01, 8.59931442e-01, 4.88677978e-02)),\n (4, Vectors.dense(9.34900015e-01, 4.11379043e-01, 8.65010205e+00,\n 9.23509168e-01, 1.16995043e-01, 5.91894106e-03)),\n (4, Vectors.dense(4.73734933e-01, -1.48321181e+00, 9.73349621e+00,\n 4.09421563e-01, 5.09375719e-01, 5.93157850e-01)),\n (4, Vectors.dense(3.41470679e-01, -6.88972582e-01, 9.60347938e+00,\n 3.62654055e-01, 2.43437468e-01, 7.13052838e-01)),\n\n\nThe chunk represents a portion of the input data used for testing the UnivariateFeatureSelector class in the PyFlink ML library. The data consists of feature vectors and corresponding labels, which are used to evaluate the feature selection algorithm.", "metadata": {"chunk_id": "doc_75_chunk_5", "original_index": 5, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_5"}, "type": "Document"} +{"page_content": " (4, Vectors.dense(-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,\n 8.24123861e-01, 5.84074506e-01, 6.54461558e-01)),\n (4, Vectors.dense(-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,\n 5.16336729e-01, 9.99776159e-01, 3.15769738e-01)),\n ],\n type_info=Types.ROW_NAMED(\n ['label', 'features'],\n [Types.INT(), DenseVectorTypeInfo()])\n ))\n\n\n\nThe chunk represents the input data for the UnivariateFeatureSelectorTest class, which is used to test the UnivariateFeatureSelector and UnivariateFeatureSelectorModel classes in the PyFlink ML library.", "metadata": {"chunk_id": "doc_75_chunk_6", "original_index": 6, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_6"}, "type": "Document"} +{"page_content": " def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n 
self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n\n\nThe chunk is part of a test suite for the `UnivariateFeatureSelector` class, which is a feature selection algorithm in the PyFlink ML library. The test suite checks the default parameter values and the ability to set and retrieve the parameters of the `UnivariateFeatureSelector` class.", "metadata": {"chunk_id": "doc_75_chunk_7", "original_index": 7, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_7"}, "type": "Document"} +{"page_content": " univariate_feature_selector\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\\\n .set_selection_mode('fpr')\\\n .set_selection_threshold(0.01)\n self.assertEqual('test_features', univariate_feature_selector.features_col)\n self.assertEqual('test_label', univariate_feature_selector.label_col)\n self.assertEqual('test_output', univariate_feature_selector.output_col)\n self.assertEqual('continuous', univariate_feature_selector.feature_type)\n self.assertEqual('categorical', univariate_feature_selector.label_type)\n self.assertEqual('fpr', univariate_feature_selector.selection_mode)\n self.assertEqual(0.01, univariate_feature_selector.selection_threshold)\n\n\n\nThe chunk is part of a test case for the UnivariateFeatureSelector class in the PyFlink ML library. It demonstrates how to set various parameters of the UnivariateFeatureSelector and verifies that the parameters are set correctly.", "metadata": {"chunk_id": "doc_75_chunk_8", "original_index": 8, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_8"}, "type": "Document"} +{"page_content": " def test_output_schema(self):\n selector = UnivariateFeatureSelector()\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\n temp_table = self.input_table.alias('test_label', 'test_features')\n model = selector.fit(temp_table)\n output = model.transform(temp_table)[0]\n self.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n\n\n\nThe chunk is part of a test suite for the `UnivariateFeatureSelector` class, which is a feature selection algorithm in the PyFlink ML library. 
The test suite checks the output schema of the `UnivariateFeatureSelector` model.", "metadata": {"chunk_id": "doc_75_chunk_9", "original_index": 9, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_9"}, "type": "Document"} +{"page_content": " def test_fit_and_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n output = model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_get_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n model_data = model.get_model_data()[0]\n self.assertEqual(['indices'], model_data.get_schema().get_field_names())\n\n\n\nThis chunk is part of a test suite for the UnivariateFeatureSelector and UnivariateFeatureSelectorModel classes in the PyFlink ML library. It tests the fit and predict functionality of the UnivariateFeatureSelector, as well as the ability to get and set the model data.", "metadata": {"chunk_id": "doc_75_chunk_10", "original_index": 10, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_10"}, "type": "Document"} +{"page_content": " model_rows = [result for result in\n self.t_env.to_data_stream(model_data).execute_and_collect()]\n self.assertEqual(1, len(model_rows))\n self.assertListEqual([0, 2, 1], model_rows[0][0])\n\n def test_set_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model_a = selector.fit(self.input_table)\n model_data = model_a.get_model_data()[0]\n\n model_b = UnivariateFeatureSelectorModel() \\\n .set_model_data(model_data)\n update_existing_params(model_b, model_a)\n\n output = model_b.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n\n\nThe chunk is part of a test case for the `UnivariateFeatureSelectorModel` class, which is used to select the most important features from a dataset. The test case checks the functionality of setting and getting the model data, and verifying the output of the transformed dataset.", "metadata": {"chunk_id": "doc_75_chunk_11", "original_index": 11, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_11"}, "type": "Document"} +{"page_content": " def test_save_load_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n reloaded_selector = self.save_and_reload(selector)\n model = reloaded_selector.fit(self.input_table)\n reloaded_model = self.save_and_reload(model)\n output = reloaded_model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n\n\nThe provided chunk is a test case for the `UnivariateFeatureSelectorModel` class, which is part of the PyFlink ML library. 
The test case demonstrates the ability to save, load, and predict using the `UnivariateFeatureSelectorModel`.", "metadata": {"chunk_id": "doc_75_chunk_12", "original_index": 12, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_12"}, "type": "Document"} +{"page_content": " def verify_output_result(\n self, output: Table,\n field_names: List[str],\n feature_col: str,\n output_col: str,\n indices: List[int]):\n collected_results = [result for result in\n self.t_env.to_data_stream(output).execute_and_collect()]\n for item in collected_results:\n item.set_field_names(field_names)\n self.assertEqual(len(indices), item[output_col].size())\n for i in range(0, len(indices)):\n self.assertEqual(item[feature_col].get(indices[i]),\n item[output_col].get(i))\n\n\nThe provided chunk is a method named `verify_output_result` that is part of the `UnivariateFeatureSelectorTest` class, which is used to verify the output of the `UnivariateFeatureSelector` and `UnivariateFeatureSelectorModel` classes in the PyFlink ML library.", "metadata": {"chunk_id": "doc_75_chunk_13", "original_index": 13, "pid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_13"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.regression;\n\n\n\nThe provided chunk is the header and package declaration of a Java class that tests the LinearRegression and LinearRegressionModel classes in the Apache Flink machine learning library.", "metadata": {"chunk_id": "doc_76_chunk_0", "original_index": 0, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_0"}, "type": "Document"} +{"page_content": "import org.apache.flink.api.common.typeinfo.TypeInformation;\nimport org.apache.flink.api.common.typeinfo.Types;\nimport org.apache.flink.api.java.typeutils.RowTypeInfo;\nimport org.apache.flink.ml.linalg.SparseVector;\nimport org.apache.flink.ml.linalg.Vectors;\nimport org.apache.flink.ml.linalg.typeinfo.DenseVectorTypeInfo;\nimport org.apache.flink.ml.regression.linearregression.LinearRegression;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModel;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModelData;\nimport org.apache.flink.ml.util.ParamUtils;\nimport org.apache.flink.ml.util.TestUtils;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.test.util.AbstractTestBase;\nimport org.apache.flink.types.Row;\n\n\n\nThis chunk contains the import statements for the classes and utilities used in the LinearRegressionTest class, which tests the functionality of 
the LinearRegression and LinearRegressionModel classes in the Apache Flink ML library.", "metadata": {"chunk_id": "doc_76_chunk_1", "original_index": 1, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_1"}, "type": "Document"} +{"page_content": "import org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.lang3.RandomUtils;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\n\n/** Tests {@link LinearRegression} and {@link LinearRegressionModel}. */\npublic class LinearRegressionTest extends AbstractTestBase {\n\n @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();\n\n private StreamExecutionEnvironment env;\n\n private StreamTableEnvironment tEnv;\n\n\n\nThis chunk contains the test class for the LinearRegression and LinearRegressionModel classes in the Apache Flink ML regression package. It includes setup code for the test environment, as well as various test methods to verify the functionality of the regression algorithms.", "metadata": {"chunk_id": "doc_76_chunk_2", "original_index": 2, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_2"}, "type": "Document"} +{"page_content": " private static final List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(2, 4), 10.0, 1.0),\n Row.of(Vectors.dense(2, 2), 6.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(1, 2), 5.0, 1.0),\n Row.of(Vectors.dense(5, 3), 11.0, 1.0));\n\n private static final double[] expectedCoefficient = new double[] {1.141, 1.829};\n\n private static final double TOLERANCE = 1e-7;\n\n private static final double PREDICTION_TOLERANCE = 0.1;\n\n private static final double COEFFICIENT_TOLERANCE = 0.1;\n\n private Table trainDataTable;\n\n\n\nThis chunk contains the training data, expected coefficients, and tolerance values used in the LinearRegressionTest class, which tests the LinearRegression and LinearRegressionModel classes in the Apache Flink ML regression package.", "metadata": {"chunk_id": "doc_76_chunk_3", "original_index": 3, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_3"}, "type": "Document"} +{"page_content": " @Before\n public void before() {\n env = TestUtils.getExecutionEnvironment();\n tEnv = StreamTableEnvironment.create(env);\n Collections.shuffle(trainData);\n trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n }\n\n\n\nThe provided chunk is part of a test suite for the LinearRegression and LinearRegressionModel classes in the Apache Flink ML library. 
The chunk sets up the test environment by creating a StreamExecutionEnvironment, StreamTableEnvironment, and a test data table before each test is executed.", "metadata": {"chunk_id": "doc_76_chunk_4", "original_index": 4, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_4"}, "type": "Document"} +{"page_content": " @SuppressWarnings(\"unchecked\")\n private void verifyPredictionResult(Table output, String labelCol, String predictionCol)\n throws Exception {\n List predResult = IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect());\n for (Row predictionRow : predResult) {\n double label = ((Number) predictionRow.getField(labelCol)).doubleValue();\n double prediction = (double) predictionRow.getField(predictionCol);\n assertTrue(Math.abs(prediction - label) / label < PREDICTION_TOLERANCE);\n }\n }\n\n\n\nThe provided chunk is a private method named `verifyPredictionResult` that is used to verify the prediction results of a linear regression model. It iterates through the output table, retrieves the label and prediction values, and checks if the prediction is within a specified tolerance range of the actual label.", "metadata": {"chunk_id": "doc_76_chunk_5", "original_index": 5, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_5"}, "type": "Document"} +{"page_content": " @Test\n public void testParam() {\n LinearRegression linearRegression = new LinearRegression();\n assertEquals(\"features\", linearRegression.getFeaturesCol());\n assertEquals(\"label\", linearRegression.getLabelCol());\n assertNull(linearRegression.getWeightCol());\n assertEquals(20, linearRegression.getMaxIter());\n assertEquals(1e-6, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.1, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(32, linearRegression.getGlobalBatchSize());\n assertEquals(0, linearRegression.getReg(), TOLERANCE);\n assertEquals(0, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"prediction\", linearRegression.getPredictionCol());\n\n\n\nThe provided chunk is a unit test that verifies the default parameter values and the ability to set custom parameter values for the `LinearRegression` class, which is part of the Apache Flink machine learning library.", "metadata": {"chunk_id": "doc_76_chunk_6", "original_index": 6, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_6"}, "type": "Document"} +{"page_content": " linearRegression\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setMaxIter(1000)\n .setTol(0.001)\n .setLearningRate(0.5)\n .setGlobalBatchSize(1000)\n .setReg(0.1)\n .setElasticNet(0.5)\n .setPredictionCol(\"test_predictionCol\");\n assertEquals(\"test_features\", linearRegression.getFeaturesCol());\n assertEquals(\"test_label\", linearRegression.getLabelCol());\n assertEquals(\"test_weight\", linearRegression.getWeightCol());\n\n\nThe provided chunk sets various parameters of a LinearRegression object, such as the feature column, label column, weight column, maximum iterations, tolerance, learning rate, global batch size, regularization, and elastic net parameters. 
This chunk is part of a test suite that verifies the functionality of the LinearRegression and LinearRegressionModel classes in the Apache Flink ML library.", "metadata": {"chunk_id": "doc_76_chunk_7", "original_index": 7, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_7"}, "type": "Document"} +{"page_content": " assertEquals(1000, linearRegression.getMaxIter());\n assertEquals(0.001, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.5, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(1000, linearRegression.getGlobalBatchSize());\n assertEquals(0.1, linearRegression.getReg(), TOLERANCE);\n assertEquals(0.5, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"test_predictionCol\", linearRegression.getPredictionCol());\n }\n\n\n\nThis chunk is part of a test suite for the LinearRegression and LinearRegressionModel classes in the Apache Flink ML library. It tests the setting and retrieval of various parameters of the LinearRegression class, such as the maximum number of iterations, tolerance, learning rate, global batch size, regularization, and the prediction column name.", "metadata": {"chunk_id": "doc_76_chunk_8", "original_index": 8, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_8"}, "type": "Document"} +{"page_content": " @Test\n public void testOutputSchema() {\n Table tempTable = trainDataTable.as(\"test_features\", \"test_label\", \"test_weight\");\n LinearRegression linearRegression =\n new LinearRegression()\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setPredictionCol(\"test_predictionCol\");\n Table output = linearRegression.fit(trainDataTable).transform(tempTable)[0];\n assertEquals(\n Arrays.asList(\"test_features\", \"test_label\", \"test_weight\", \"test_predictionCol\"),\n output.getResolvedSchema().getColumnNames());\n }\n\n\n\nThe chunk tests the output schema of the LinearRegression model, ensuring that the expected columns are present in the output table after fitting and transforming the input data.", "metadata": {"chunk_id": "doc_76_chunk_9", "original_index": 9, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_9"}, "type": "Document"} +{"page_content": " @Test\n public void testFitAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n\n\nThe provided chunk is a unit test that tests the functionality of the `LinearRegression` and `LinearRegressionModel` classes. 
The test checks that the `LinearRegression` model can be fit to the training data and that the resulting model can be used to make predictions on the same data.", "metadata": {"chunk_id": "doc_76_chunk_10", "original_index": 10, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_10"}, "type": "Document"} +{"page_content": " @Test\n public void testInputTypeConversion() throws Exception {\n trainDataTable = TestUtils.convertDataTypesToSparseInt(tEnv, trainDataTable);\n assertArrayEquals(\n new Class[] {SparseVector.class, Integer.class, Integer.class},\n TestUtils.getColumnDataTypes(trainDataTable));\n\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n\n\nThe chunk tests the input type conversion functionality of the LinearRegression class, where it converts the input data types to SparseVector, Integer, and Integer, and then verifies the prediction results.", "metadata": {"chunk_id": "doc_76_chunk_11", "original_index": 11, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_11"}, "type": "Document"} +{"page_content": " @Test\n public void testSaveLoadAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n linearRegression =\n TestUtils.saveAndReload(\n tEnv,\n linearRegression,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegression::load);\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n\n\nThe chunk is part of a test suite for the `LinearRegression` and `LinearRegressionModel` classes in the Apache Flink ML library. The test suite verifies the functionality of saving, loading, and predicting with the linear regression model.", "metadata": {"chunk_id": "doc_76_chunk_12", "original_index": 12, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_12"}, "type": "Document"} +{"page_content": " model =\n TestUtils.saveAndReload(\n tEnv,\n model,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegressionModel::load);\n assertEquals(\n Collections.singletonList(\"coefficient\"),\n model.getModelData()[0].getResolvedSchema().getColumnNames());\n Table output = model.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n\n\nThe chunk is part of a test suite for the LinearRegression and LinearRegressionModel classes in the Apache Flink ML library. 
It tests the ability to save, load, and use a trained LinearRegressionModel.", "metadata": {"chunk_id": "doc_76_chunk_13", "original_index": 13, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_13"}, "type": "Document"} +{"page_content": " @Test\n public void testGetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n assertNotNull(modelData);\n assertEquals(1, modelData.size());\n assertArrayEquals(\n expectedCoefficient, modelData.get(0).coefficient.values, COEFFICIENT_TOLERANCE);\n }\n\n\n\nThe chunk is a unit test that verifies the functionality of the `getModelData()` method in the `LinearRegressionModel` class. This method is used to retrieve the model data, including the coefficient values, from the trained linear regression model.", "metadata": {"chunk_id": "doc_76_chunk_14", "original_index": 14, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_14"}, "type": "Document"} +{"page_content": " @Test\n public void testSetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n\n LinearRegressionModel newModel = new LinearRegressionModel();\n ParamUtils.updateExistingParams(newModel, model.getParamMap());\n newModel.setModelData(model.getModelData());\n Table output = newModel.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n\n\nThe chunk tests the ability to set the model data of a LinearRegressionModel and verify the prediction results.", "metadata": {"chunk_id": "doc_76_chunk_15", "original_index": 15, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_15"}, "type": "Document"} +{"page_content": " @Test\n public void testMoreSubtaskThanData() throws Exception {\n List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0));\n\n Table trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n\n\n\nThis chunk tests the behavior of the LinearRegression algorithm when the number of subtasks is greater than the amount of training data.", "metadata": {"chunk_id": "doc_76_chunk_16", "original_index": 16, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_16"}, "type": "Document"} +{"page_content": " LinearRegression linearRegression =\n new LinearRegression().setWeightCol(\"weight\").setGlobalBatchSize(128);\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testRegularization() throws Exception {\n checkRegularization(0, RandomUtils.nextDouble(0, 1), expectedCoefficient);\n checkRegularization(0.1, 0, new double[] {1.165, 1.780});\n checkRegularization(0.1, 1, new double[] {1.143, 1.812});\n checkRegularization(0.1, 0.5, new double[] {1.154, 1.796});\n }\n\n\n\nThe chunk is part of a test suite for the 
LinearRegression and LinearRegressionModel classes in the Apache Flink ML library. It tests the behavior of the LinearRegression model when the number of subtasks is greater than the number of data points, and also tests the regularization functionality of the model.", "metadata": {"chunk_id": "doc_76_chunk_17", "original_index": 17, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_17"}, "type": "Document"} +{"page_content": " @SuppressWarnings(\"unchecked\")\n private void checkRegularization(double reg, double elasticNet, double[] expectedCoefficient)\n throws Exception {\n LinearRegressionModel model =\n new LinearRegression()\n .setWeightCol(\"weight\")\n .setReg(reg)\n .setElasticNet(elasticNet)\n .fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n final double errorTol = 1e-3;\n assertArrayEquals(expectedCoefficient, modelData.get(0).coefficient.values, errorTol);\n }\n}\n\n\nThe chunk is a private method named `checkRegularization` that tests the linear regression model with different regularization parameters. It is part of the `LinearRegressionTest` class, which is a test suite for the `LinearRegression` and `LinearRegressionModel` classes in the Apache Flink ML library.", "metadata": {"chunk_id": "doc_76_chunk_18", "original_index": 18, "pid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_18"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n\nThis chunk contains the Apache Software Foundation license information for the Java code in the document.", "metadata": {"chunk_id": "doc_77_chunk_0", "original_index": 0, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_0"}, "type": "Document"} +{"page_content": "package org.apache.flink.iteration.datacache.nonkeyed;\n\nimport org.apache.flink.annotation.Internal;\nimport org.apache.flink.api.common.typeutils.TypeSerializer;\nimport org.apache.flink.core.fs.Path;\nimport org.apache.flink.core.memory.DataOutputView;\nimport org.apache.flink.core.memory.DataOutputViewStreamWrapper;\nimport org.apache.flink.core.memory.MemorySegment;\nimport org.apache.flink.runtime.memory.MemoryAllocationException;\nimport org.apache.flink.table.runtime.util.MemorySegmentPool;\n\nimport javax.annotation.Nullable;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Optional;\n\n/** A class that writes cache data to memory segments. */\n@Internal\nclass MemorySegmentWriter implements SegmentWriter {\n\n /** The tool to serialize received records into bytes. 
*/\n private final TypeSerializer serializer;\n\n\n\nThis chunk is part of the implementation of the `MemorySegmentWriter` class, which is responsible for writing cache data to memory segments. It is part of the `org.apache.flink.iteration.datacache.nonkeyed` package, which is likely related to Flink's data caching functionality.", "metadata": {"chunk_id": "doc_77_chunk_1", "original_index": 1, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_1"}, "type": "Document"} +{"page_content": " /** The pre-allocated path to hold cached records into the file system. */\n private final Path path;\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The output stream to write serialized content to memory segments. */\n private final ManagedMemoryOutputStream outputStream;\n\n /** The wrapper view of the output stream to be used with TypeSerializer API. */\n private final DataOutputView outputView;\n\n /** The number of records added so far. */\n private int count;\n\n\n\nThe provided chunk represents the class-level fields of the `MemorySegmentWriter` class, which is responsible for writing cache data to memory segments. This class is part of the Apache Flink project and is used for managing the caching of data in the iteration data cache.", "metadata": {"chunk_id": "doc_77_chunk_2", "original_index": 2, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_2"}, "type": "Document"} +{"page_content": " MemorySegmentWriter(\n TypeSerializer serializer,\n Path path,\n MemorySegmentPool segmentPool,\n long expectedSize)\n throws MemoryAllocationException {\n this.serializer = serializer;\n this.path = path;\n this.segmentPool = segmentPool;\n this.outputStream = new ManagedMemoryOutputStream(segmentPool, expectedSize);\n this.outputView = new DataOutputViewStreamWrapper(outputStream);\n this.count = 0;\n }\n\n @Override\n public boolean addRecord(T record) throws IOException {\n if (outputStream.getPos() >= DataCacheWriter.MAX_SEGMENT_SIZE) {\n return false;\n }\n try {\n serializer.serialize(record, outputView);\n count++;\n return true;\n } catch (RuntimeException e) {\n if (e.getCause() instanceof MemoryAllocationException) {\n return false;\n }\n throw e;\n }\n }\n\n\n\nThe provided chunk is the constructor and the `addRecord()` method of the `MemorySegmentWriter` class, which is responsible for writing cache data to memory segments. This class is part of the `org.apache.flink.iteration.datacache.nonkeyed` package and is used within the Apache Flink project.", "metadata": {"chunk_id": "doc_77_chunk_3", "original_index": 3, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_3"}, "type": "Document"} +{"page_content": " @Override\n public Optional finish() throws IOException {\n if (count > 0) {\n return Optional.of(new Segment(path, count, outputStream.getSegments()));\n } else {\n segmentPool.returnAll(outputStream.getSegments());\n return Optional.empty();\n }\n }\n\n /** An output stream subclass that accepts bytes and writes them to memory segments. */\n private static class ManagedMemoryOutputStream extends OutputStream {\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The number of bytes in a memory segment. */\n private final int pageSize;\n\n /** The memory segments containing written bytes. 
*/\n private final List segments = new ArrayList<>();\n\n\n\nThe provided chunk is part of the `MemorySegmentWriter` class, which is responsible for writing cache data to memory segments. The `finish()` method is used to complete the writing process and return the written data as a `Segment` object. The `ManagedMemoryOutputStream` class is a custom output stream implementation that manages the allocation and usage of memory segments to store the written data.", "metadata": {"chunk_id": "doc_77_chunk_4", "original_index": 4, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_4"}, "type": "Document"} +{"page_content": " /** The index of the segment that currently accepts written bytes. */\n private int segmentIndex;\n\n /** The number of bytes in the current segment that have been written. */\n private int segmentOffset;\n\n /** The number of bytes that have been written so far. */\n private long globalOffset;\n\n /** The number of bytes that have been allocated so far. */\n private long allocatedBytes;\n\n public ManagedMemoryOutputStream(MemorySegmentPool segmentPool, long expectedSize)\n throws MemoryAllocationException {\n this.segmentPool = segmentPool;\n this.pageSize = segmentPool.pageSize();\n ensureCapacity(Math.max(expectedSize, 1L));\n }\n\n\n\nThis chunk of code represents the internal state and constructor of the `ManagedMemoryOutputStream` class, which is a custom output stream implementation that writes data to memory segments managed by a `MemorySegmentPool`.", "metadata": {"chunk_id": "doc_77_chunk_5", "original_index": 5, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_5"}, "type": "Document"} +{"page_content": " public long getPos() {\n return globalOffset;\n }\n\n public List getSegments() {\n return segments;\n }\n\n @Override\n public void write(int b) throws IOException {\n write(new byte[] {(byte) b}, 0, 1);\n }\n\n @Override\n public void write(@Nullable byte[] b, int off, int len) throws IOException {\n try {\n ensureCapacity(globalOffset + len);\n } catch (MemoryAllocationException e) {\n throw new RuntimeException(e);\n }\n\n\n\nThe provided chunk is part of the `ManagedMemoryOutputStream` class, which is a subclass of `OutputStream` that writes bytes to memory segments. This class is used within the `MemorySegmentWriter` class, which is responsible for writing cache data to memory segments.", "metadata": {"chunk_id": "doc_77_chunk_6", "original_index": 6, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_6"}, "type": "Document"} +{"page_content": " while (len > 0) {\n int currentLen = Math.min(len, pageSize - segmentOffset);\n segments.get(segmentIndex).put(segmentOffset, b, off, currentLen);\n segmentOffset += currentLen;\n globalOffset += currentLen;\n if (segmentOffset >= pageSize) {\n segmentIndex++;\n segmentOffset = 0;\n }\n off += currentLen;\n len -= currentLen;\n }\n }\n\n private void ensureCapacity(long capacity) throws MemoryAllocationException {\n if (allocatedBytes >= capacity) {\n return;\n }\n\n int required =\n (int) (capacity % pageSize == 0 ? capacity / pageSize : capacity / pageSize + 1)\n - segments.size();\n\n\n\nThe provided chunk is part of the `ManagedMemoryOutputStream` class, which is a subclass of `OutputStream` that writes bytes to memory segments. 
The chunk includes the implementation of the `write()` method, which handles writing the bytes to the memory segments, and the `ensureCapacity()` method, which ensures that there is enough memory available to write the bytes.", "metadata": {"chunk_id": "doc_77_chunk_7", "original_index": 7, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_7"}, "type": "Document"} +{"page_content": " List allocatedSegments = new ArrayList<>();\n for (int i = 0; i < required; i++) {\n MemorySegment memorySegment = segmentPool.nextSegment();\n if (memorySegment == null) {\n segmentPool.returnAll(allocatedSegments);\n throw new MemoryAllocationException();\n }\n allocatedSegments.add(memorySegment);\n }\n\n segments.addAll(allocatedSegments);\n allocatedBytes += (long) allocatedSegments.size() * pageSize;\n }\n }\n}\n\n\nThis chunk of code is responsible for allocating and managing memory segments for the `ManagedMemoryOutputStream` class, which is used to write cache data to memory segments in the `MemorySegmentWriter` class.", "metadata": {"chunk_id": "doc_77_chunk_8", "original_index": 8, "pid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_8"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n\nThe provided chunk is the Apache Software Foundation license header that is commonly found at the beginning of source code files.", "metadata": {"chunk_id": "doc_78_chunk_0", "original_index": 0, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_0"}, "type": "Document"} +{"page_content": "package org.apache.flink.iteration.operator.coordinator;\n\nimport org.apache.flink.iteration.IterationID;\nimport org.apache.flink.iteration.operator.event.GloballyAlignedEvent;\nimport org.apache.flink.iteration.operator.event.SubtaskAlignedEvent;\nimport org.apache.flink.runtime.jobgraph.OperatorID;\nimport org.apache.flink.runtime.operators.coordination.EventReceivingTasks;\nimport org.apache.flink.runtime.operators.coordination.MockOperatorCoordinatorContext;\nimport org.apache.flink.runtime.operators.coordination.OperatorEvent;\nimport org.apache.flink.util.TestLogger;\n\nimport org.junit.Test;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.function.BiFunction;\n\nimport static org.junit.Assert.assertEquals;\n\n/** Tests the behavior of {@link HeadOperatorCoordinator}. 
*/\npublic class HeadOperatorCoordinatorTest extends TestLogger {\n\n\n\nThis chunk is the beginning of a test class that tests the behavior of the `HeadOperatorCoordinator` class, which is part of the `org.apache.flink.iteration.operator.coordinator` package.", "metadata": {"chunk_id": "doc_78_chunk_1", "original_index": 1, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_1"}, "type": "Document"} +{"page_content": " @Test(timeout = 60000L)\n public void testForwardEvents() throws Exception {\n IterationID iterationId = new IterationID();\n List operatorIds = Arrays.asList(new OperatorID(), new OperatorID());\n List parallelisms = Arrays.asList(2, 3);\n List receivingTasks =\n Arrays.asList(\n EventReceivingTasks.createForRunningTasks(),\n EventReceivingTasks.createForRunningTasks());\n List coordinators = new ArrayList<>();\n\n\n\nThe provided chunk is a part of a test case that verifies the behavior of the `HeadOperatorCoordinator` class, which is responsible for coordinating the execution of a head operator in an iterative data processing pipeline.", "metadata": {"chunk_id": "doc_78_chunk_2", "original_index": 2, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_2"}, "type": "Document"} +{"page_content": " int totalParallelism = parallelisms.stream().mapToInt(i -> i).sum();\n\n for (int i = 0; i < operatorIds.size(); ++i) {\n HeadOperatorCoordinator coordinator =\n createCoordinator(iterationId, parallelisms.get(i), totalParallelism);\n setAllSubtasksReady(coordinator, receivingTasks.get(i), parallelisms.get(i));\n coordinators.add(coordinator);\n }\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(2, j, false)));\n checkSentEvent(1, new GloballyAlignedEvent(2, false), receivingTasks, parallelisms);\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(3, 0, false)));\n checkSentEvent(2, new GloballyAlignedEvent(3, true), receivingTasks, parallelisms);\n }\n\n\n\nThis chunk of code is part of a test suite for the `HeadOperatorCoordinator` class, which is responsible for handling events in an iterative processing pipeline. The test suite verifies the behavior of the coordinator, including its ability to forward events to the appropriate tasks.", "metadata": {"chunk_id": "doc_78_chunk_3", "original_index": 3, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_3"}, "type": "Document"} +{"page_content": " private HeadOperatorCoordinator createCoordinator(\n IterationID iterationId, int parallelism, int totalHeadParallelism) {\n MockOperatorCoordinatorContext context =\n new MockOperatorCoordinatorContext(new OperatorID(), parallelism);\n return (HeadOperatorCoordinator)\n new HeadOperatorCoordinator.HeadOperatorCoordinatorProvider(\n new OperatorID(), iterationId, totalHeadParallelism)\n .create(context);\n }\n\n private void setAllSubtasksReady(\n HeadOperatorCoordinator coordinator,\n EventReceivingTasks receivingTasks,\n int parallelism) {\n for (int i = 0; i < parallelism; i++) {\n coordinator.executionAttemptReady(i, 0, receivingTasks.createGatewayForSubtask(i, 0));\n }\n }\n\n\n\nThe provided chunk contains two private methods that are used to create a `HeadOperatorCoordinator` instance and set all subtasks as ready for the coordinator, respectively. 
These methods are likely part of the test suite for the `HeadOperatorCoordinator` class, which is the main focus of the overall document.", "metadata": {"chunk_id": "doc_78_chunk_4", "original_index": 4, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_4"}, "type": "Document"} +{"page_content": " private void receiveEvent(\n List coordinators,\n List parallelisms,\n BiFunction> eventFactory)\n throws Exception {\n for (int i = 0; i < coordinators.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n List events = eventFactory.apply(i, j);\n for (OperatorEvent event : events) {\n coordinators.get(i).handleEventFromOperator(j, 0, event);\n }\n }\n }\n }\n\n\n\nThe provided chunk is a private method named `receiveEvent` that is part of the `HeadOperatorCoordinatorTest` class, which is responsible for testing the behavior of the `HeadOperatorCoordinator` class.", "metadata": {"chunk_id": "doc_78_chunk_5", "original_index": 5, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_5"}, "type": "Document"} +{"page_content": " private void checkSentEvent(\n int expectedNumEvents,\n GloballyAlignedEvent expectedLastEvent,\n List receivingTasks,\n List parallelisms)\n throws InterruptedException {\n for (int i = 0; i < parallelisms.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n while (true) {\n List events = receivingTasks.get(i).getSentEventsForSubtask(j);\n if (events.size() < expectedNumEvents) {\n Thread.sleep(50);\n continue;\n }\n\n assertEquals(expectedLastEvent, events.get(events.size() - 1));\n break;\n }\n }\n }\n }\n}\n\n\nThe provided chunk is a private method named `checkSentEvent` that is used to verify the expected number of events and the last event sent to the receiving tasks in the `HeadOperatorCoordinatorTest` class. This method is part of the test suite for the `HeadOperatorCoordinator` class, which is responsible for handling events in the Flink iteration operator.", "metadata": {"chunk_id": "doc_78_chunk_6", "original_index": 6, "pid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_6"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n\nThis chunk contains the Apache Software Foundation license information for the Java code in the overall document.", "metadata": {"chunk_id": "doc_79_chunk_0", "original_index": 0, "pid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_0"}, "type": "Document"} +{"page_content": "package org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. */\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n\n\nThis chunk of code is part of an example program that demonstrates the usage of the StopWordsRemover feature in the Apache Flink machine learning library. The program sets up the execution environment, creates a StopWordsRemover instance, and applies it to a sample input data stream.", "metadata": {"chunk_id": "doc_79_chunk_1", "original_index": 1, "pid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_1"}, "type": "Document"} +{"page_content": " // Generates input data.\n DataStream inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n\n\nThis chunk generates input data for a Flink streaming application that demonstrates the use of a StopWordsRemover feature engineering component.", "metadata": {"chunk_id": "doc_79_chunk_2", "original_index": 2, "pid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_2"}, "type": "Document"} +{"page_content": " // Creates a StopWordsRemover object and initializes its parameters.\n StopWordsRemover remover =\n new StopWordsRemover().setInputCols(\"input\").setOutputCols(\"output\");\n\n // Uses the StopWordsRemover object for feature transformations.\n Table outputTable = remover.transform(inputTable)[0];\n\n // Extracts and displays the results.\n for (CloseableIterator it = outputTable.execute().collect(); it.hasNext(); ) {\n Row row = it.next();\n\n String[] inputValues = row.getFieldAs(\"input\");\n String[] outputValues = row.getFieldAs(\"output\");\n\n System.out.printf(\n \"Input Values: %s\\tOutput Values: %s\\n\",\n Arrays.toString(inputValues), Arrays.toString(outputValues));\n }\n }\n}\n\n\nThis code snippet 
demonstrates the usage of the StopWordsRemover feature engineering component in a Flink ML application. It creates a StopWordsRemover instance, applies it to an input data stream, and then extracts and displays the transformed results.", "metadata": {"chunk_id": "doc_79_chunk_3", "original_index": 3, "pid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_3"}, "type": "Document"} +{"page_content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n\n\nThis chunk contains the Apache Software Foundation license information for the code provided in the document.", "metadata": {"chunk_id": "doc_80_chunk_0", "original_index": 0, "pid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_0"}, "type": "Document"} +{"page_content": "# Simple program that creates an IndexToStringModel instance and uses it\n# for feature engineering.\n\nfrom pyflink.common import Types\nfrom pyflink.datastream import StreamExecutionEnvironment\nfrom pyflink.ml.feature.stringindexer import IndexToStringModel\nfrom pyflink.table import StreamTableEnvironment\n\n# create a new StreamExecutionEnvironment\nenv = StreamExecutionEnvironment.get_execution_environment()\n\n# create a StreamTableEnvironment\nt_env = StreamTableEnvironment.create(env)\n\n# generate input data\npredict_table = t_env.from_data_stream(\n env.from_collection([\n (0, 3),\n (1, 2),\n ],\n type_info=Types.ROW_NAMED(\n ['input_col1', 'input_col2'],\n [Types.INT(), Types.INT()])\n ))\n\n\n\nThis chunk of code demonstrates the creation and usage of an IndexToStringModel instance for feature engineering in a PyFlink application. 
It sets up the necessary execution environment, creates a StreamTableEnvironment, and generates input data for the model.", "metadata": {"chunk_id": "doc_80_chunk_1", "original_index": 1, "pid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_1"}, "type": "Document"} +{"page_content": "# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n\n\nThis chunk demonstrates the creation and initialization of an IndexToStringModel instance, which is then used for feature engineering on input data.", "metadata": {"chunk_id": "doc_80_chunk_2", "original_index": 2, "pid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_2"}, "type": "Document"} +{"page_content": "# extract and display the results\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = result[field_names.index(model.get_input_cols()[i])]\n output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n\n\nThis chunk of code extracts and displays the input and output values from the transformed data using the IndexToStringModel.", "metadata": {"chunk_id": "doc_80_chunk_3", "original_index": 3, "pid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_3"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n\nThe provided chunk is the Apache Software License 2.0 header that is commonly found at the beginning of source code files.", "metadata": {"chunk_id": "doc_81_chunk_0", "original_index": 0, "pid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_0"}, "type": "Document"} +{"page_content": "#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\nusing namespace LOG4CXX_NS::pattern;\nusing namespace LOG4CXX_NS::spi;\nusing namespace LOG4CXX_NS::helpers;\n\nIMPLEMENT_LOG4CXX_OBJECT(ColorEndPatternConverter)\n\nColorEndPatternConverter::ColorEndPatternConverter() :\n\tLoggingEventPatternConverter(LOG4CXX_STR(\"Color End\"),\n\t\tLOG4CXX_STR(\"colorEnd\"))\n{\n}\n\n\n\nThis chunk of code appears to be the implementation of the `ColorEndPatternConverter` class, which is part of the log4cxx logging library. The class is responsible for resetting the color of the output when formatting a logging event.", "metadata": {"chunk_id": "doc_81_chunk_1", "original_index": 1, "pid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_1"}, "type": "Document"} +{"page_content": "PatternConverterPtr ColorEndPatternConverter::newInstance(\n\tconst std::vector& /* options */)\n{\n\tstatic WideLife instance = std::make_shared();\n\treturn instance;\n}\n\nvoid ColorEndPatternConverter::format(\n\tconst LoggingEventPtr& event,\n\tLogString& toAppendTo,\n\tPool& p) const\n{\n\n\t// Reset all colors on the output(code 0)\n\t// Code 39 would be to reset colors only\n\ttoAppendTo.append(LOG4CXX_STR(\"\\x1B[0m\"));\n}\n\n\nThis chunk of code defines the `newInstance` and `format` methods of the `ColorEndPatternConverter` class, which is part of the log4cxx library. The `newInstance` method creates a singleton instance of the `ColorEndPatternConverter` class, while the `format` method appends a color reset sequence to the output string.", "metadata": {"chunk_id": "doc_81_chunk_2", "original_index": 2, "pid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_2"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n\nThe provided chunk is the Apache Software License 2.0 header that is commonly used in open-source software projects.", "metadata": {"chunk_id": "doc_82_chunk_0", "original_index": 0, "pid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_0"}, "type": "Document"} +{"page_content": "#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n\n\nThe provided chunk is the implementation of the `XMLFilenameFilter` class, which is part of the Apache log4cxx library. The class is responsible for filtering log messages based on the filename pattern specified in the constructor.", "metadata": {"chunk_id": "doc_82_chunk_1", "original_index": 1, "pid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_1"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n\nThe provided chunk is the Apache Software Foundation license header that appears at the beginning of the C++ source code file for the SMTPAppenderTestCase class, which is part of the log4cxx library.", "metadata": {"chunk_id": "doc_83_chunk_0", "original_index": 0, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_0"}, "type": "Document"} +{"page_content": "#define LOG4CXX_TEST 1\n#include \n\n#include \n#include \"../appenderskeletontestcase.h\"\n#include \n#include \n#include \n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace net\n{\n\nclass MockTriggeringEventEvaluator :\n\tpublic virtual spi::TriggeringEventEvaluator\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(MockTriggeringEventEvaluator)\n\t\tLOG4CXX_CAST_ENTRY(spi::TriggeringEventEvaluator)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\n\nThis chunk of code appears to be part of a unit test suite for the SMTPAppender class in the log4cxx library. 
It includes the necessary header files, defines a mock triggering event evaluator class, and sets up the testing environment.", "metadata": {"chunk_id": "doc_83_chunk_1", "original_index": 1, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_1"}, "type": "Document"} +{"page_content": "\t\tMockTriggeringEventEvaluator()\n\t\t{\n\t\t}\n\n\t\tbool isTriggeringEvent(const spi::LoggingEventPtr& event) override\n\t\t{\n\t\t\treturn true;\n\t\t}\n\tprivate:\n\t\tMockTriggeringEventEvaluator(const MockTriggeringEventEvaluator&);\n\t\tMockTriggeringEventEvaluator& operator=(const MockTriggeringEventEvaluator&);\n};\n}\n}\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\nusing namespace log4cxx::net;\n\nIMPLEMENT_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\n\n\nThe provided chunk is a part of the implementation of the `MockTriggeringEventEvaluator` class, which is a custom implementation of the `spi::TriggeringEventEvaluator` interface used in the `SMTPAppenderTestCase` unit tests.", "metadata": {"chunk_id": "doc_83_chunk_2", "original_index": 2, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_2"}, "type": "Document"} +{"page_content": "\n/**\n Unit tests of log4cxx::SocketAppender\n */\nclass SMTPAppenderTestCase : public AppenderSkeletonTestCase\n{\n\t\tLOGUNIT_TEST_SUITE(SMTPAppenderTestCase);\n\t\t//\n\t\t// tests inherited from AppenderSkeletonTestCase\n\t\t//\n\t\tLOGUNIT_TEST(testDefaultThreshold);\n\t\tLOGUNIT_TEST(testSetOptionThreshold);\n\t\tLOGUNIT_TEST(testTrigger);\n\t\tLOGUNIT_TEST(testInvalid);\n//#define LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n#ifdef LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n\t\t// This test requires the following environment variables:\n\t\t// LOG4CXX_TEST_EMAIL_RECIPIENT - where the email is sent\n\t\t// LOG4CXX_TEST_SMTP_HOST_NAME - the email server\n\t\tLOGUNIT_TEST(testValid);\n#endif\n\t\tLOGUNIT_TEST_SUITE_END();\n\n\n\nThis chunk contains the unit tests for the SMTPAppender class, which is part of the log4cxx library. The tests cover various aspects of the SMTPAppender's functionality, including default threshold, setting option threshold, triggering events, handling invalid configurations, and validating the email sending process.", "metadata": {"chunk_id": "doc_83_chunk_3", "original_index": 3, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_3"}, "type": "Document"} +{"page_content": "\n\tpublic:\n\n\t\tAppenderSkeleton* createAppenderSkeleton() const\n\t\t{\n\t\t\treturn new log4cxx::net::SMTPAppender();\n\t\t}\n\n\t\tvoid setUp()\n\t\t{\n\t\t}\n\n\t\tvoid tearDown()\n\t\t{\n\t\t\tLogManager::resetConfiguration();\n\t\t}\n\n\t\t/**\n\t\t * Tests that triggeringPolicy element will set evaluator.\n\t\t */\n\t\tvoid testTrigger()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppender1.xml\");\n\t\t\tauto appender = log4cxx::cast(Logger::getRootLogger()->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto evaluator = appender->getEvaluator();\n\t\t\tLOGUNIT_ASSERT(evaluator);\n\t\t\tLOGUNIT_ASSERT_EQUAL(true, evaluator->instanceof(MockTriggeringEventEvaluator::getStaticClass()));\n\t\t}\n\n\n\nThis chunk is part of the unit tests for the SMTPAppender class in the log4cxx library. 
It includes the implementation of the createAppenderSkeleton() method, the setUp() and tearDown() methods, and the testTrigger() method which tests that the triggeringPolicy element in the configuration file sets the appropriate evaluator for the SMTPAppender.", "metadata": {"chunk_id": "doc_83_chunk_4", "original_index": 4, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_4"}, "type": "Document"} +{"page_content": "\t\tvoid testInvalid()\n\t\t{\n\t\t\tauto appender = std::make_shared();\n\t\t\tappender->setSMTPHost(LOG4CXX_STR(\"smtp.invalid\"));\n\t\t\tappender->setTo(LOG4CXX_STR(\"you@example.invalid\"));\n\t\t\tappender->setFrom(LOG4CXX_STR(\"me@example.invalid\"));\n\t\t\tappender->setLayout(std::make_shared());\n\t\t\tPool p;\n\t\t\tappender->activateOptions(p);\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\troot->addAppender(appender);\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\");\n\t\t\tLOG4CXX_ERROR(root, \"Sending Message\"); // The DefaultEvaluator should trigger e-mail generation\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(eh->errorReported());\n\t\t}\n\n\n\nThis chunk is part of a unit test suite for the SMTPAppender class in the log4cxx library. The test case checks the behavior of the SMTPAppender when the SMTP host or email addresses are invalid.", "metadata": {"chunk_id": "doc_83_chunk_5", "original_index": 5, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_5"}, "type": "Document"} +{"page_content": "\n\t\tvoid testValid()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppenderValid.xml\");\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\\n\\nThis paragraph should be preceeded by a blank line.\");\n\n\t\t\tauto appender = log4cxx::cast(root->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(!eh->errorReported());\n\t\t}\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(SMTPAppenderTestCase);\n\n\n\nThis chunk is part of the unit tests for the SMTPAppender class in the log4cxx library. It tests the valid configuration of the SMTPAppender and ensures that the email is sent correctly.", "metadata": {"chunk_id": "doc_83_chunk_6", "original_index": 6, "pid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_6"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n\nThe provided chunk is the Apache Software License header that is commonly found at the beginning of source code files.", "metadata": {"chunk_id": "doc_84_chunk_0", "original_index": 0, "pid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_0"}, "type": "Document"} +{"page_content": " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\n\n\nThis chunk appears to be the header and beginning of a C++ source file that implements the BasicConfigurator class from the log4cxx logging library.", "metadata": {"chunk_id": "doc_84_chunk_1", "original_index": 1, "pid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_1"}, "type": "Document"} +{"page_content": "void BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n\n\nThis chunk of code is part of the Apache Log4cxx library, which is a logging library for C++. The code defines the `BasicConfigurator` class, which provides a simple way to configure the logging system. The `configure()` methods set up the logging layout and appender, while the `resetConfiguration()` method resets the logging configuration.", "metadata": {"chunk_id": "doc_84_chunk_2", "original_index": 2, "pid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_2"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n\nThe provided chunk is the Apache Software Foundation license header that is typically included at the beginning of source code files.", "metadata": {"chunk_id": "doc_85_chunk_0", "original_index": 0, "pid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a_0"}, "type": "Document"} +{"page_content": "#include \n/* Prevent error C2491: 'std::numpunct<_Elem>::id': definition of dllimport static data member not allowed */\n#if defined(_MSC_VER) && (LOG4CXX_UNICHAR_API || LOG4CXX_LOGCHAR_IS_UNICHAR)\n#define __FORCE_INSTANCE\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\ntypedef std::basic_stringstream LogStream;\n\nLogString LOG4CXX_NS::hexdump(const void* bytes, uint32_t len, HexdumpFlags flags){\n\tLogString ret;\n\tconst uint8_t* bytes_u8 = static_cast(bytes);\n\tLogStream sstream;\n#if LOG4CXX_LOGCHAR_IS_WCHAR\n\tconst wchar_t fill_char = L'0';\n\tconst wchar_t space_fill_char = L' ';\n#else\n\tconst logchar fill_char = '0';\n\tconst logchar space_fill_char = ' ';\n#endif\n\n\n\nThis chunk of code is part of the implementation of the `hexdump` function in the log4cxx library, which is a popular logging library for C++. The function takes a buffer of bytes and generates a hexadecimal dump of the contents, with the offset, hexadecimal representation, and ASCII representation of the bytes.", "metadata": {"chunk_id": "doc_85_chunk_1", "original_index": 1, "pid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a_1"}, "type": "Document"} +{"page_content": "\tif(flags & HexdumpFlags::AddStartingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\tfor(uint32_t offset = 0; offset < len; offset += 16){\n\t\tif(offset != 0){\n\t\t\tsstream << LOG4CXX_EOL;\n\t\t}\n\n\t\t// Print out the offset\n\t\tsstream << std::hex << std::setw(8) << std::setfill(fill_char) << offset << std::resetiosflags(std::ios_base::fmtflags(0));\n\n\t\tsstream << std::setw(0) << LOG4CXX_STR(\" \");\n\n\t\t// Print out the first 8 bytes\n\t\tfor(int byte = 0; byte < 8; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\tif(byte != 8){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tsstream << std::setfill(space_fill_char);\n\t\t\tif(byte != 8){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n\n\nThis chunk of code is responsible for printing the hexadecimal representation of the input data, with the offset and ASCII representation. 
It is part of the `hexdump` function in the log4cxx library, which is used for logging and debugging purposes.", "metadata": {"chunk_id": "doc_85_chunk_2", "original_index": 2, "pid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a_2"}, "type": "Document"} +{"page_content": "\t\tsstream << LOG4CXX_STR(\" \");\n\n\t\t// Print out the last 8 bytes\n\t\tfor(int byte = 8; byte < 16; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\tif(byte != 15){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tif(byte != 15){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n\t\t// Print out the ASCII text\n\t\tsstream << LOG4CXX_STR(\" |\");\n\t\tfor(int byte = 0; byte < 16; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tif(std::isprint(bytes_u8[offset + byte])){\n\t\t\t\tlogchar to_append = bytes_u8[offset + byte];\n\t\t\t\tsstream << to_append;\n\t\t\t}else{\n\t\t\t\tsstream << LOG4CXX_STR(\".\");\n\t\t\t}\n\t\t}\n\t\tsstream << LOG4CXX_STR(\"|\");\n\t}\n\n\tif(flags & HexdumpFlags::AddEndingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\treturn sstream.str();\n}\n\n\nThis chunk of code is responsible for printing the hexadecimal representation of the input data, along with its ASCII representation, in a formatted manner. It is part of a larger function that generates a hexadump of the input data.", "metadata": {"chunk_id": "doc_85_chunk_3", "original_index": 3, "pid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a_3"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\n\n\nThis chunk contains the Apache Software Foundation license information and the necessary include statements for the DenyAllFilter unit test in the log4cxx library.", "metadata": {"chunk_id": "doc_86_chunk_0", "original_index": 0, "pid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_0"}, "type": "Document"} +{"page_content": "using namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n\n\nThis chunk of code contains unit tests for the DenyAllFilter class, which is part of the log4cxx logging library. The tests verify that the DenyAllFilter.decide() method returns the expected Filter.DENY value.", "metadata": {"chunk_id": "doc_86_chunk_1", "original_index": 1, "pid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_1"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \n#include \n#include \n#include \n\n\n\nThe chunk is the header and license information for a C++ source file that is part of the log4cxx logging library.", "metadata": {"chunk_id": "doc_87_chunk_0", "original_index": 0, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_0"}, "type": "Document"} +{"page_content": "#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/threadfilter.h\"\n#include \n#include \n#include \n#include \n#include \"testchar.h\"\n#include \n#include \n#include \n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(MinimumTestCase)\n{\n\tLOGUNIT_TEST_SUITE(MinimumTestCase);\n\tLOGUNIT_TEST(simple);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\troot->removeAllAppenders();\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\n\nThis chunk of code appears to be part of a unit test suite for the log4cxx logging library. It includes various utility classes and headers, and defines a test case class called `MinimumTestCase` that sets up and tears down the test environment.", "metadata": {"chunk_id": "doc_87_chunk_1", "original_index": 1, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_1"}, "type": "Document"} +{"page_content": "\tvoid simple()\n\t{\n\t\tLayoutPtr layout = LayoutPtr(new SimpleLayout());\n\t\tAppenderPtr appender = FileAppenderPtr(new FileAppender(layout, LOG4CXX_STR(\"output/simple\"), false));\n\t\troot->addAppender(appender);\n\t\tcommon();\n\n\t\tLOGUNIT_ASSERT(Compare::compare(LOG4CXX_FILE(\"output/simple\"), LOG4CXX_FILE(\"witness/simple\")));\n\t}\n\n\tstd::string createMessage(int i, Pool & pool)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = 0;\n\n\t\t// In the lines below, the logger names are chosen as an aid in\n\t\t// remembering their level values. In general, the logger names\n\t\t// have no bearing to level values.\n\t\tLoggerPtr ERRlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"ERR\"));\n\t\tERRlogger->setLevel(Level::getError());\n\n\n\nThis chunk of code is part of the `MinimumTestCase` class, which is a unit test case for the log4cxx logging library. The `simple()` method sets up a simple logging configuration with a `FileAppender` and a `SimpleLayout`, and then calls the `common()` method to perform various logging operations. The `createMessage()` method is a helper function used to generate log messages. 
The `common()` method sets up several loggers with different log levels and performs various logging operations to test the logging functionality.", "metadata": {"chunk_id": "doc_87_chunk_2", "original_index": 2, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_2"}, "type": "Document"} +{"page_content": "\t\tLoggerPtr INF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF\"));\n\t\tINF->setLevel(Level::getInfo());\n\n\t\tLoggerPtr INF_ERR = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR\"));\n\t\tINF_ERR->setLevel(Level::getError());\n\n\t\tLoggerPtr DEB = Logger::getLogger(LOG4CXX_TEST_STR(\"DEB\"));\n\t\tDEB->setLevel(Level::getDebug());\n\n\t\t// Note: categories with undefined level\n\t\tLoggerPtr INF_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.UNDEF\"));\n\t\tLoggerPtr INF_ERR_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR.UNDEF\"));\n\t\tLoggerPtr UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"UNDEF\"));\n\n\n\nThis chunk of code sets up various loggers with different log levels, including loggers with undefined levels, within the `common()` method of the `MinimumTestCase` class.", "metadata": {"chunk_id": "doc_87_chunk_3", "original_index": 3, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_3"}, "type": "Document"} +{"page_content": "\t\tstd::string msg(\"Message \");\n\n\t\tPool pool;\n\n\t\t// These should all log.----------------------------\n\t\tLOG4CXX_FATAL(ERRlogger, createMessage(i, pool));\n\t\ti++; //0\n\t\tLOG4CXX_ERROR(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF, createMessage(i, pool));\n\t\ti++; // 2\n\t\tLOG4CXX_ERROR(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_UNDEF, createMessage(i, pool));\n\t\ti++; //6\n\t\tLOG4CXX_ERROR(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR, createMessage(i, pool));\n\t\ti++; // 10\n\t\tLOG4CXX_ERROR(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_ERROR(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\n\nThis chunk of code is part of the `common()` function within the `MinimumTestCase` class, which is a unit test case for the log4cxx logging library. 
The code demonstrates the logging behavior of different logger instances with various log levels.", "metadata": {"chunk_id": "doc_87_chunk_4", "original_index": 4, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_4"}, "type": "Document"} +{"page_content": "\t\tLOG4CXX_FATAL(DEB, createMessage(i, pool));\n\t\ti++; //14\n\t\tLOG4CXX_ERROR(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(DEB, createMessage(i, pool));\n\t\ti++;\n\n\t\t// defaultLevel=DEBUG\n\t\tLOG4CXX_FATAL(UNDEF, createMessage(i, pool));\n\t\ti++; // 19\n\t\tLOG4CXX_ERROR(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\t// The following should not log\n\t\tLOG4CXX_WARN(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_DEBUG(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\n\nThis chunk of code is part of the `common()` function within the `MinimumTestCase` class, which is a unit test for the log4cxx logging library. The chunk demonstrates the logging behavior of different logger levels and configurations.", "metadata": {"chunk_id": "doc_87_chunk_5", "original_index": 5, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_5"}, "type": "Document"} +{"page_content": "\t\tLOG4CXX_WARN(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\tLOG4CXX_INFO(INF, LOG4CXX_TEST_STR(\"Messages should bear numbers 0 through 23.\"));\n\t}\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\nprivate:\n\tstatic const File FILTERED;\n};\n\n\nconst File MinimumTestCase::FILTERED(\"output/minimumfiltered\");\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(MinimumTestCase);\n\n\nThis chunk is part of the `common()` function within the `MinimumTestCase` class, which is testing the logging functionality of the log4cxx library.", "metadata": {"chunk_id": "doc_87_chunk_6", "original_index": 6, "pid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_6"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n\nThe provided chunk is the Apache Software License header that is commonly found at the beginning of source code files.", "metadata": {"chunk_id": "doc_88_chunk_0", "original_index": 0, "pid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_0"}, "type": "Document"} +{"page_content": "#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n\n\nThis chunk of code defines the NDCPatternConverter class, which is part of the log4cxx library's pattern package. The class is responsible for returning the event's NDC (Nested Diagnostic Context) in a StringBuffer.", "metadata": {"chunk_id": "doc_88_chunk_1", "original_index": 1, "pid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_1"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \"testchar.h\"\n\n\nThe chunk is the opening license and include statements of the C++ source file for the FMTTestCase unit test suite.", "metadata": {"chunk_id": "doc_89_chunk_0", "original_index": 0, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_0"}, "type": "Document"} +{"page_content": "#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/iso8601filter.h\"\n#include \"util/absolutetimefilter.h\"\n#include \"util/relativetimefilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/threadfilter.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/filenamefilter.h\"\n#include \"vectorappender.h\"\n#include \n#include \n#include \n#include \n#include \n#include \n\n#define REGEX_STR(x) x\n#define PAT0 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* - Message [0-9]\\\\{1,2\\\\}\")\n#define PAT1 ISO8601_PAT REGEX_STR(\" \") PAT0\n#define PAT2 ABSOLUTE_DATE_AND_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT3 ABSOLUTE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT4 RELATIVE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT5 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* : Message [0-9]\\\\{1,2\\\\}\")\n\n\nThis chunk contains various utility header files and preprocessor macros used for pattern matching and filtering log messages in the FMTTestCase class.", "metadata": {"chunk_id": "doc_89_chunk_1", "original_index": 1, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_1"}, "type": "Document"} +{"page_content": "#define PAT6 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) .*patternlayouttest.cpp\\\\([0-9]\\\\{1,4\\\\}\\\\): Message [0-9]\\\\{1,3\\\\}\")\n#define PAT11a REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ log4j.PatternLayoutTest: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT11b REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ root: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT12 REGEX_STR(\"^\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) \")\\\n\tREGEX_STR(\".*patternlayouttest.cpp([0-9]\\\\{1,4\\\\}): \")\\\n\tREGEX_STR(\"Message [0-9]\\\\{1,2\\\\}\")\n#define PAT_MDC_1 REGEX_STR(\"\")\n\n\n\nThis chunk contains various preprocessor macros that define regular expression patterns used for parsing log messages in the FMTTestCase class.", "metadata": {"chunk_id": "doc_89_chunk_2", "original_index": 2, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_2"}, "type": "Document"} +{"page_content": "using namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(FMTTestCase)\n{\n\tLOGUNIT_TEST_SUITE(FMTTestCase);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST(test1_expanded);\n\tLOGUNIT_TEST(test10);\n//\tLOGUNIT_TEST(test_date);\n\tLOGUNIT_TEST_SUITE_END();\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\tMDC::clear();\n\t\tlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"java.org.apache.log4j.PatternLayoutTest\"));\n\t}\n\n\n\nThis 
chunk is part of a C++ unit test suite for the FMTLayout class in the log4cxx logging library. The test suite includes several test methods that configure the FMTLayout and verify its behavior.", "metadata": {"chunk_id": "doc_89_chunk_3", "original_index": 3, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_3"}, "type": "Document"} +{"page_content": "\tvoid tearDown()\n\t{\n\t\tMDC::clear();\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\tvoid test1()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\tvoid test1_expanded()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1_expanded.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\n\nThe provided chunk is part of a C++ unit test class named `FMTTestCase` that tests the functionality of the `FMTLayout` class, which is a part of the log4cxx logging library. The chunk includes the `tearDown()` method, which clears the Mapped Diagnostic Context (MDC) and resets the logger repository configuration, as well as the `test1()` and `test1_expanded()` methods, which configure the logger and run the common test logic.", "metadata": {"chunk_id": "doc_89_chunk_4", "original_index": 4, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_4"}, "type": "Document"} +{"page_content": "\tvoid test10()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout10.properties\"));\n\t\tcommon();\n\n\t\tControlFilter filter1;\n\t\tfilter1 << PAT6;\n\t\tThreadFilter filter2;\n\t\tLineNumberFilter filter3;\n\t\tFilenameFilter filenameFilter(__FILE__, \"patternlayouttest.cpp\");\n\n\n\t\tstd::vector filters;\n\t\tfilters.push_back(&filenameFilter);\n\t\tfilters.push_back(&filter1);\n\t\tfilters.push_back(&filter2);\n\t\tfilters.push_back(&filter3);\n\n\n\nThe provided chunk is part of the `FMTTestCase` class, which is a test suite for the `FMTLayout` class in the Apache Log4cxx library. The `test10()` method is one of the test cases that configures the logging system, generates some log messages, and then applies various filters to the output to verify the expected format.", "metadata": {"chunk_id": "doc_89_chunk_5", "original_index": 5, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_5"}, "type": "Document"} +{"page_content": "\n\t\ttry\n\t\t{\n\t\t\tTransformer::transform(TEMP, FILTERED, filters);\n\t\t}\n\t\tcatch (UnexpectedFormatException& e)\n\t\t{\n\t\t\tstd::cout << \"UnexpectedFormatException :\" << e.what() << std::endl;\n\t\t\tthrow;\n\t\t}\n\n\t\tLOGUNIT_ASSERT(Compare::compare(FILTERED, LOG4CXX_FILE(\"witness/patternLayout.10\")));\n\t}\n\n\tvoid test_date(){\n\t\tstd::tm tm = {};\n\t\tstd::stringstream ss(\"2013-04-11 08:35:34\");\n\t\tss >> std::get_time(&tm, \"%Y-%m-%d %H:%M:%S\");\n\t\tauto tp = std::chrono::system_clock::from_time_t(std::mktime(&tm));\n\t\tuint64_t micros = std::chrono::duration_cast(tp.time_since_epoch()).count();\n\n\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction([micros](){\n\t\t\treturn micros;\n\t\t});\n\n\n\nThis chunk is part of the `test10()` and `test_date()` methods of the `FMTTestCase` class, which is a unit test for the `FMTLayout` class in the Apache Log4cxx library. 
The `test10()` method tests the behavior of the `FMTLayout` class with various filters applied, while the `test_date()` method tests the handling of date and time formatting.", "metadata": {"chunk_id": "doc_89_chunk_6", "original_index": 6, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_6"}, "type": "Document"} +{"page_content": "\t\tlog4cxx::spi::LoggingEventPtr logEvt = std::make_shared(LOG4CXX_STR(\"foo\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t Level::getInfo(),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t LOG4CXX_STR(\"A Message\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t log4cxx::spi::LocationInfo::getLocationUnavailable());\n\t\tFMTLayout layout(LOG4CXX_STR(\"{d:%Y-%m-%d %H:%M:%S} {message}\"));\n\t\tLogString output;\n\t\tlog4cxx::helpers::Pool pool;\n\t\tlayout.format( output, logEvt, pool);\n\n\n\nThe provided chunk is part of the `test_date()` function within the `FMTTestCase` class, which is testing the formatting of log messages using the `FMTLayout` class.", "metadata": {"chunk_id": "doc_89_chunk_7", "original_index": 7, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_7"}, "type": "Document"} +{"page_content": "\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction(nullptr);\n\n\t\tLOGUNIT_ASSERT_EQUAL(LOG4CXX_STR(\"2013-04-11 09:35:34 A Message\"), output);\n\t}\n\n\tstd::string createMessage(Pool & pool, int i)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = -1;\n\n\t\tPool pool;\n\n\n\t\tLOG4CXX_DEBUG(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_DEBUG(root, createMessage(pool, i));\n\n\t\tLOG4CXX_INFO(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_INFO(root, createMessage(pool, i));\n\n\t\tLOG4CXX_WARN(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_WARN(root, createMessage(pool, i));\n\n\n\nThis chunk of code is part of the `test_date()` method in the `FMTTestCase` class, which is testing the behavior of the `FMTLayout` class in the log4cxx library. The method sets a custom time function for the `Date` class, creates a `LoggingEvent` object, formats it using the `FMTLayout`, and then asserts that the output matches the expected value.", "metadata": {"chunk_id": "doc_89_chunk_8", "original_index": 8, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_8"}, "type": "Document"} +{"page_content": "\t\tLOG4CXX_ERROR(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_ERROR(root, createMessage(pool, i));\n\n\t\tLOG4CXX_FATAL(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_FATAL(root, createMessage(pool, i));\n\t}\n\n\tprivate:\n\t\tstatic const LogString FILTERED;\n\t\tstatic const LogString TEMP;\n\n};\n\nconst LogString FMTTestCase::TEMP(LOG4CXX_STR(\"output/fmtlayout\"));\nconst LogString FMTTestCase::FILTERED(LOG4CXX_STR(\"output/fmtlayoutfiltered\"));\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(FMTTestCase);\n\n\nThis chunk is part of the implementation of the `FMTTestCase` class, which is a test suite for the `FMTLayout` class in the log4cxx library. The chunk contains the implementation of the `common()` method, which logs messages at different log levels using the `LOG4CXX_ERROR` and `LOG4CXX_FATAL` macros.", "metadata": {"chunk_id": "doc_89_chunk_9", "original_index": 9, "pid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_9"}, "type": "Document"} +{"page_content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\n\nThe provided chunk is the copyright notice and license information for the BufferedWriter class, which is part of the log4cxx library.", "metadata": {"chunk_id": "doc_90_chunk_0", "original_index": 0, "pid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_0"}, "type": "Document"} +{"page_content": "#ifndef _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n#define _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n\n#include \n\nnamespace LOG4CXX_NS\n{\n\nnamespace helpers\n{\n\n/**\n* Writes text to a character-output stream buffering\n* requests to increase efficiency.\n*/\nclass LOG4CXX_EXPORT BufferedWriter : public Writer\n{\n\tprivate:\n\t\tLOG4CXX_DECLARE_PRIVATE_MEMBER_PTR(BufferedWriterPriv, m_priv)\n\n\n\nThis chunk represents the header file for the `BufferedWriter` class, which is part of the `LOG4CXX_NS` namespace in the `helpers` sub-namespace. The `BufferedWriter` class is responsible for writing text to a character-output stream while buffering requests to increase efficiency.", "metadata": {"chunk_id": "doc_90_chunk_1", "original_index": 1, "pid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_1"}, "type": "Document"} +{"page_content": "\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n\n\nThis chunk defines the public and private methods of the `BufferedWriter` class, which is part of the `LOG4CXX_NS::helpers` namespace. 
The class is responsible for writing text to a character-output stream in a buffered manner to increase efficiency.", "metadata": {"chunk_id": "doc_90_chunk_2", "original_index": 2, "pid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_2"}, "type": "Document"} diff --git a/experiments/data/contextual-embeddings/data_context/qrels.jsonl b/experiments/data/contextual-embeddings/data_context/qrels.jsonl new file mode 100644 index 0000000..659d68e --- /dev/null +++ b/experiments/data/contextual-embeddings/data_context/qrels.jsonl @@ -0,0 +1,248 @@ +{"0": {"5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_0": 1}} +{"1": {"5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_1": 1}} +{"2": {"5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_2": 1, "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_1": 1, "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145_0": 1}} +{"3": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_1": 1}} +{"4": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_1": 1}} +{"5": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_2": 1}} +{"6": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_0": 1}} +{"7": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_5": 1}} +{"8": {"78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c_6": 1}} +{"9": {"2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_4": 1}} +{"10": {"2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd_4": 1}} +{"11": {"531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f_0": 1}} +{"12": {"531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f_0": 1}} +{"13": {"531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f_0": 1}} +{"14": {"0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95_0": 1}} +{"15": {"0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95_0": 1}} +{"16": {"0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95_0": 1}} +{"17": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_0": 1}} +{"18": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_1": 1}} +{"19": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_2": 1}} +{"20": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_1": 1}} +{"21": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_2": 1}} +{"22": {"9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210_0": 1}} +{"23": {"a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_2": 1}} +{"24": {"a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f_1": 1}} +{"25": {"86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748_0": 1}} +{"26": {"f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_4": 1}} +{"27": {"f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_0": 1}} +{"28": {"f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642_1": 1}} +{"29": {"17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2_0": 1}} +{"30": {"17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2_0": 1}} +{"31": {"17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2_0": 1}} +{"32": {"db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_7": 1}} +{"33": {"db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_2": 1}} +{"34": 
{"db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_6": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_5": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_4": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_3": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_2": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_1": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_0": 1}} +{"35": {"db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_7": 1}} +{"36": {"db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_2": 1, "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4_1": 1}} +{"37": {"ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_0": 1}} +{"38": {"ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_0": 1}} +{"39": {"ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_1": 1}} +{"40": {"ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947_0": 1}} +{"41": {"3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_0": 1}} +{"42": {"3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_1": 1}} +{"43": {"3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_1": 1}} +{"44": {"3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_1": 1}} +{"45": {"3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b_0": 1}} +{"46": {"b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad_0": 1}} +{"47": {"b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad_0": 1}} +{"48": {"b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad_0": 1}} +{"49": {"d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_0": 1}} +{"50": {"d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f_4": 1}} +{"51": {"fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_0": 1}} +{"52": {"fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_0": 1}} +{"53": {"fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_2": 1}} +{"54": {"fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_3": 1}} +{"55": {"fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31_5": 1}} +{"56": {"44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3_1": 1}} +{"57": {"bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2_1": 1}} +{"58": {"2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_3": 1}} +{"59": {"2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3_3": 1}} +{"60": {"1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_0": 1}} +{"61": {"1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74_0": 1}} +{"62": {"538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_0": 1}} +{"63": {"538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_0": 1}} +{"64": {"538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_1": 1}} +{"65": {"538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5_3": 1}} +{"66": {"6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_0": 1}} +{"67": {"6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_1": 1, "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_0": 1}} +{"68": {"6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_1": 1}} +{"69": 
{"6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_1": 1, "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_0": 1}} +{"70": {"6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787_2": 1}} +{"71": {"68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_1": 1}} +{"72": {"68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_1": 1}} +{"73": {"68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_1": 1}} +{"74": {"68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_2": 1}} +{"75": {"68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c_2": 1}} +{"76": {"8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_3": 1}} +{"77": {"8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69_3": 1}} +{"78": {"e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_5": 1}} +{"79": {"e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1_9": 1}} +{"80": {"b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36_0": 1}} +{"81": {"b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36_0": 1}} +{"82": {"f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d_0": 1}} +{"83": {"f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d_0": 1}} +{"84": {"f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d_0": 1}} +{"85": {"087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a_1": 1}} +{"86": {"130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_2": 1}} +{"87": {"130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_0": 1}} +{"88": {"130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_0": 1}} +{"89": {"130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_1": 1}} +{"90": {"130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6_2": 1}} +{"91": {"43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd_1": 1}} +{"92": {"43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd_1": 1}} +{"93": {"4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8_0": 1}} +{"94": {"4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8_0": 1}} +{"95": {"4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8_0": 1}} +{"96": {"e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_2": 1}} +{"97": {"e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_3": 1, "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_2": 1, "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_1": 1, "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66_0": 1}} +{"98": {"f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_23": 1, "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_16": 1}} +{"99": {"f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_20": 1, "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_18": 1, "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_15": 1, "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913_0": 1}} +{"100": {"bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_3": 1}} +{"101": {"bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_3": 1}} +{"102": {"bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee_5": 1}} +{"103": {"f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0_0": 1}} +{"104": 
{"f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0_0": 1}} +{"105": {"f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0_0": 1}} +{"106": {"f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0_0": 1}} +{"107": {"26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_2": 1, "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_1": 1, "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_0": 1}} +{"108": {"26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_1": 1}} +{"109": {"26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_4": 1}} +{"110": {"26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08_2": 1}} +{"111": {"e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_3": 1}} +{"112": {"e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05_2": 1}} +{"113": {"e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0_9": 1}} +{"114": {"c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_6": 1, "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_4": 1, "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_3": 1, "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_2": 1, "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_1": 1, "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_0": 1}} +{"115": {"c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf_7": 1}} +{"116": {"eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09_1": 1}} +{"117": {"eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09_1": 1}} +{"118": {"a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b_0": 1}} +{"119": {"a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b_1": 1}} +{"120": {"a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b_1": 1}} +{"121": {"2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_1": 1}} +{"122": {"2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9_3": 1}} +{"123": {"b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_9": 1, "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_3": 1, "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_1": 1, "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_0": 1}} +{"124": {"b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_5": 1}} +{"125": {"b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad_1": 1}} +{"126": {"cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_3": 1}} +{"127": {"cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_11": 1, "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_10": 1, "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_7": 1, "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25_3": 1}} +{"128": {"2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_1": 1}} +{"129": {"2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_1": 1, "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_0": 1}} +{"130": {"2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_1": 1}} +{"131": {"2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757_1": 1}} +{"132": {"bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_1": 1}} +{"133": {"bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b_1": 1}} +{"134": 
{"4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_6": 1, "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_3": 1, "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_1": 1, "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_0": 1}} +{"135": {"4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_6": 1, "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_5": 1}} +{"136": {"4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_8": 1, "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe_7": 1}} +{"137": {"d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_2": 1}} +{"138": {"d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_1": 1}} +{"139": {"d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_1": 1, "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_0": 1}} +{"140": {"d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2_11": 1}} +{"141": {"36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_1": 1}} +{"142": {"36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_2": 1}} +{"143": {"36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_4": 1}} +{"144": {"36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a_5": 1}} +{"145": {"b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e_1": 1, "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e_0": 1}} +{"146": {"7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298_1": 1}} +{"147": {"7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298_2": 1}} +{"148": {"0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238_12": 1}} +{"149": {"adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823_0": 1}} +{"150": {"adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823_0": 1}} +{"151": {"1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84_0": 1}} +{"152": {"1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84_2": 1}} +{"153": {"9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_0": 1}} +{"154": {"9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582_0": 1}} +{"155": {"639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_0": 1}} +{"156": {"639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_1": 1}} +{"157": {"639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304_3": 1}} +{"158": {"21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_0": 1}} +{"159": {"21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_1": 1}} +{"160": {"21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac_4": 1}} +{"161": {"58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_0": 1}} +{"162": {"58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_0": 1}} +{"163": {"58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_1": 1}} +{"164": {"58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_1": 1}} +{"165": {"58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0_1": 1}} +{"166": {"0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_1": 1, "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_0": 1}} +{"167": {"0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_2": 1}} +{"168": {"0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4_3": 1}} +{"169": 
{"ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26_0": 1}} +{"170": {"ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26_1": 1}} +{"171": {"8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_1": 1}} +{"172": {"8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94_2": 1}} +{"173": {"63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_0": 1}} +{"174": {"63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_1": 1}} +{"175": {"63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc_3": 1}} +{"176": {"28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8_2": 1}} +{"177": {"e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7_0": 1}} +{"178": {"e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7_0": 1}} +{"179": {"e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7_0": 1}} +{"180": {"e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7_0": 1}} +{"181": {"dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578_2": 1}} +{"182": {"2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_2": 1}} +{"183": {"2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_4": 1}} +{"184": {"2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_3": 1, "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_2": 1, "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_1": 1, "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54_0": 1}} +{"185": {"6e284600be25c8833b866ef0ebfab953a8d0a0f8420cfe56fa17e28664de1b82_0": 1}} +{"186": {"c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_1": 1}} +{"187": {"c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_3": 1}} +{"188": {"c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb_3": 1}} +{"189": {"3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293_0": 1}} +{"190": {"3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293_0": 1}} +{"191": {"96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_14": 1}} +{"192": {"96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_49": 1}} +{"193": {"96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_77": 1, "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_72": 1, "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_51": 1, "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_26": 1, "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_16": 1, "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b_15": 1}} +{"194": {"9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_1": 1}} +{"195": {"9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_1": 1}} +{"196": {"9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_1": 1}} +{"197": {"9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65_1": 1}} +{"198": {"139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_1": 1}} +{"199": {"139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_1": 1}} +{"200": {"139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de_1": 1}} +{"201": {"883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd_1": 1}} +{"202": {"8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5_1": 1}} +{"203": {"76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_7": 1, 
"76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_1": 1}} +{"204": {"76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_7": 1}} +{"205": {"76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3_9": 1}} +{"206": {"05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_6": 1}} +{"207": {"05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_11": 1}} +{"208": {"05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7_17": 1}} +{"209": {"7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6_3": 1}} +{"210": {"e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_2": 1, "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_1": 1}} +{"211": {"e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320_3": 1}} +{"212": {"b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_2": 1}} +{"213": {"b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_2": 1, "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_1": 1, "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_0": 1}} +{"214": {"b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e_1": 1}} +{"215": {"85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_2": 1}} +{"216": {"85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_2": 1}} +{"217": {"85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107_3": 1}} +{"218": {"d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_2": 1}} +{"219": {"d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_0": 1}} +{"220": {"d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0_1": 1}} +{"221": {"5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_1": 1}} +{"222": {"5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_1": 1}} +{"223": {"5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_1": 1}} +{"224": {"5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4_1": 1}} +{"225": {"968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_3": 1, "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_1": 1, "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_0": 1}} +{"226": {"968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4_6": 1}} +{"227": {"652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_1": 1, "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_0": 1}} +{"228": {"652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_2": 1}} +{"229": {"652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_2": 1}} +{"230": {"652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_2": 1}} +{"231": {"652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863_1": 1}} +{"232": {"6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a_1": 1}} +{"233": {"b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_0": 1}} +{"234": {"b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_1": 1}} +{"235": {"b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_1": 1}} +{"236": {"b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac_0": 1}} +{"237": {"e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_1": 1}} +{"238": {"e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_3": 1, "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76_2": 1}} +{"239": 
{"0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_1": 1}} +{"240": {"0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_1": 1}} +{"241": {"0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_1": 1}} +{"242": {"0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b_1": 1}} +{"243": {"fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_3": 1, "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_2": 1, "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_0": 1}} +{"244": {"fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733_8": 1}} +{"245": {"4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_2": 1}} +{"246": {"4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_2": 1}} +{"247": {"4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f_2": 1}} diff --git a/experiments/data/contextual-embeddings/data_context/queries.jsonl b/experiments/data/contextual-embeddings/data_context/queries.jsonl new file mode 100644 index 0000000..b966ac7 --- /dev/null +++ b/experiments/data/contextual-embeddings/data_context/queries.jsonl @@ -0,0 +1,248 @@ +{"id": "0", "text": "What is the purpose of the DiffExecutor struct?"} +{"id": "1", "text": "How do you create a new DiffExecutor instance?"} +{"id": "2", "text": "What happens in the `run_target` method of the DiffExecutor?"} +{"id": "3", "text": "What is the purpose of the SIGNALS and SIGNALS_PTR static variables?"} +{"id": "4", "text": "How does the harness closure work?"} +{"id": "5", "text": "What is the purpose of the StdMapObserver?"} +{"id": "6", "text": "What feedbacks are used in this fuzzer?"} +{"id": "7", "text": "How is the initial corpus generated?"} +{"id": "8", "text": "What stages are used in the fuzzer?"} +{"id": "9", "text": "What does the `OomObserver` struct do?"} +{"id": "10", "text": "How do I create a new `OomObserver`?"} +{"id": "11", "text": "What does the function `both_require` do?"} +{"id": "12", "text": "How does `both_require` check for the sequence 'a', 'b', 'c'?"} +{"id": "13", "text": "What is the purpose of the `len` parameter in `both_require`?"} +{"id": "14", "text": "What does the vuln() function do?"} +{"id": "15", "text": "How is input normally read in the main() function?"} +{"id": "16", "text": "What input condition causes the program to abort in the main() function?"} +{"id": "17", "text": "What is the purpose of the `MergeScheduler` struct?"} +{"id": "18", "text": "How does the `on_add` method of the `MergeScheduler` work?"} +{"id": "19", "text": "What is the purpose of the `removable()` method in the `MergeScheduler`?"} +{"id": "20", "text": "How does the `on_remove` method of the `MergeScheduler` work?"} +{"id": "21", "text": "What is the purpose of the `current()` method in the `MergeScheduler`?"} +{"id": "22", "text": "Why is the `next()` method of the `MergeScheduler` unimplemented?"} +{"id": "23", "text": "How are the `Fp` and `Lr` registers defined as aliases in the `Regs` enum?"} +{"id": "24", "text": "What is the purpose of the `get_backdoor_arch_regs` function?"} +{"id": "25", "text": "How do I get the `EnumMap` of backdoor architecture registers?"} +{"id": "26", "text": "How do you convert a `NautilusInput` to a `BytesInput`?"} +{"id": "27", "text": "How do you get the `Tree` representation of a `NautilusInput`?"} +{"id": "28", "text": "What traits does `NautilusInput` implement?"} +{"id": "29", "text": "How do you initialize the logger?"} +{"id": "30", "text": "How is 
the log file created?"} +{"id": "31", "text": "What logger implementation is being used?"} +{"id": "32", "text": "How do you register a new type in the Registry?"} +{"id": "33", "text": "What is the purpose of the `_real_register` method?"} +{"id": "34", "text": "How can you retrieve registered types from the Registry?"} +{"id": "35", "text": "How are targeted types handled in the Registry?"} +{"id": "36", "text": "What is the purpose of the `_modules` set in the Registry?"} +{"id": "37", "text": "What does the Octal class do?"} +{"id": "38", "text": "How does the decode method of the Octal class work?"} +{"id": "39", "text": "What does the getTarget method of the Octal class do?"} +{"id": "40", "text": "What external dependencies does the Octal class have?"} +{"id": "41", "text": "How does the decode method of the A1z26 class work?"} +{"id": "42", "text": "What is the purpose of the priority method in the A1z26 class?"} +{"id": "43", "text": "What does the getParams method do in the A1z26 class?"} +{"id": "44", "text": "What is the purpose of the getTarget method?"} +{"id": "45", "text": "How are the delimiters in the input ciphertext handled?"} +{"id": "46", "text": "What is the purpose of the priority method in the Base58_ripple class?"} +{"id": "47", "text": "What does the getParams method of the Base58_ripple class do?"} +{"id": "48", "text": "What is the purpose of the getTarget method in the Base58_ripple class?"} +{"id": "49", "text": "How are the character and word boundaries determined in the Morse code decoding process?"} +{"id": "50", "text": "What is the purpose of the priority method in the Morse_code class?"} +{"id": "51", "text": "What does the `getInfo` method of the `Soundex` class do?"} +{"id": "52", "text": "What does the `getTarget` method of the `Soundex` class return?"} +{"id": "53", "text": "How does the `attemptCrack` method of the `Soundex` class attempt to crack a Soundex-encoded ciphertext?"} +{"id": "54", "text": "What does the `sortlistwithdict` method of the `Soundex` class do?"} +{"id": "55", "text": "What parameters does the `Soundex` class take in its constructor?"} +{"id": "56", "text": "What parameters can be configured for the Tap_code decoder?"} +{"id": "57", "text": "How does the CipheyDists class handle configuration?"} +{"id": "58", "text": "What is the priority method used for in the Base69 class?"} +{"id": "59", "text": "How are the parameters for the Base69 class specified?"} +{"id": "60", "text": "What encryption schemes do the tests cover?"} +{"id": "61", "text": "What is the expected decrypted plaintext used in most of the tests?"} +{"id": "62", "text": "What does the MakeBools function return?"} +{"id": "63", "text": "How does the MakeFixedStrings function work?"} +{"id": "64", "text": "What is the purpose of the long string in the MakeStrings function?"} +{"id": "65", "text": "What UUID values are returned by the MakeUUIDs function?"} +{"id": "66", "text": "How can I append a column to a ColumnTuple?"} +{"id": "67", "text": "How do I load column data from an input stream into a ColumnTuple?"} +{"id": "68", "text": "How can I clear the data in a ColumnTuple?"} +{"id": "69", "text": "How do I get the number of rows in a ColumnTuple?"} +{"id": "70", "text": "What is the purpose of the ColumnTupleT class?"} +{"id": "71", "text": "How can you append elements to a ColumnIPv4 instance?"} +{"id": "72", "text": "How can you access elements from a ColumnIPv4 instance?"} +{"id": "73", "text": "How can you append the content of another column to a 
ColumnIPv4 instance?"} +{"id": "74", "text": "How can you get the number of rows in a ColumnIPv4 instance?"} +{"id": "75", "text": "How can you create a slice of a ColumnIPv4 instance?"} +{"id": "76", "text": "What does the GetTypeMeta() function do?"} +{"id": "77", "text": "How does the CompateStringsCaseInsensitive() function compare two strings case-insensitively?"} +{"id": "78", "text": "What regular expression syntax is supported on Windows and Mac for death tests?"} +{"id": "79", "text": "What is a known caveat with \"threadsafe\" style death tests?"} +{"id": "80", "text": "How do you read a string using WireFormat?"} +{"id": "81", "text": "How do you read a 64-bit unsigned integer using WireFormat?"} +{"id": "82", "text": "What is the purpose of the LoadPrefix function in the Column class?"} +{"id": "83", "text": "What is the purpose of the SavePrefix function in the Column class?"} +{"id": "84", "text": "How does the Save function in the Column class work?"} +{"id": "85", "text": "How does the ColumnLowCardinality class handle null values?"} +{"id": "86", "text": "What geometric data types are supported by the code?"} +{"id": "87", "text": "How can you append an element to a ColumnGeo?"} +{"id": "88", "text": "How can you access an element in a ColumnGeo?"} +{"id": "89", "text": "How can you append the content of one ColumnGeo to another?"} +{"id": "90", "text": "How can you clear the data of a ColumnGeo?"} +{"id": "91", "text": "How do you construct a ProjectedIterator?"} +{"id": "92", "text": "How do you increment and decrement a ProjectedIterator?"} +{"id": "93", "text": "What are the possible values for the ConsoleOutput enum?"} +{"id": "94", "text": "What package is the ConsoleOutput enum defined in?"} +{"id": "95", "text": "What do the different values of the ConsoleOutput enum represent?"} +{"id": "96", "text": "How does the UpdateChecker store the timestamp of the last update check?"} +{"id": "97", "text": "What does the UpdateChecker return if the current version is up to date?"} +{"id": "98", "text": "How does the DefaultCredentialRetrievers class handle credential helpers on Windows?"} +{"id": "99", "text": "How does the DefaultCredentialRetrievers class avoid duplicate CredentialRetriever instances?"} +{"id": "100", "text": "What does the `ReproducibleImageTest` test class verify?"} +{"id": "101", "text": "How does the `createImage()` method create the test image?"} +{"id": "102", "text": "What is the expected tarball structure and how is it verified in `testTarballStructure()`?"} +{"id": "103", "text": "What package does the HelloWorld class belong to?"} +{"id": "104", "text": "What license is this code released under?"} +{"id": "105", "text": "What year was this code copyrighted?"} +{"id": "106", "text": "What company owns the copyright to this code?"} +{"id": "107", "text": "How does MavenSettingsServerCredentials infer credentials for a server?"} +{"id": "108", "text": "What exceptions can be thrown when inferring credentials with MavenSettingsServerCredentials?"} +{"id": "109", "text": "What is the format of the returned AuthProperty when inferring credentials?"} +{"id": "110", "text": "How are the test settings files used in the tests?"} +{"id": "111", "text": "How does the testPull() method verify the correctness of the pulled BLOB?"} +{"id": "112", "text": "How is the RegistryClient instance created in the test methods?"} +{"id": "113", "text": "How does JibBuildRunner handle a RegistryUnauthorizedException with a 403 Forbidden status code?"} +{"id": "114", 
"text": "How does the buildToDockerDaemonAndRun method verify the built image?"} +{"id": "115", "text": "How does the testExecute_dockerClient test work?"} +{"id": "116", "text": "What exception is thrown when registry authentication fails?"} +{"id": "117", "text": "What information is included in the exception message when a `RegistryAuthenticationFailedException` is thrown?"} +{"id": "118", "text": "What is the default length of a generated pepper when no length is specified?"} +{"id": "119", "text": "Is it possible to generate a pepper with a length of zero?"} +{"id": "120", "text": "What is the expected value of the pepper returned by the PepperGenerator.get() method?"} +{"id": "121", "text": "What is the purpose of the slowEquals method that takes two CharSequence objects?"} +{"id": "122", "text": "How does the hash method handle the presence or absence of a salt value?"} +{"id": "123", "text": "What are the input parameters for the BalloonHashingFunction constructor?"} +{"id": "124", "text": "How can I obtain an instance of the BalloonHashingFunction using the factory method?"} +{"id": "125", "text": "What are the test vectors used in the `TEST_VECTORS` array?"} +{"id": "126", "text": "What is the maximum digest size supported by this Blake2b implementation?"} +{"id": "127", "text": "How do you reset the hasher to its initial state?"} +{"id": "128", "text": "What class does BadParametersException extend?"} +{"id": "129", "text": "What package does the BadParametersException class belong to?"} +{"id": "130", "text": "Who is the author of the BadParametersException class?"} +{"id": "131", "text": "Since which version has the BadParametersException class been available?"} +{"id": "132", "text": "What is the purpose of the Hash class?"} +{"id": "133", "text": "What information does the Hash class store?"} +{"id": "134", "text": "How do I create a HashBuilder instance?"} +{"id": "135", "text": "How do I specify the hashing algorithm to use with the HashBuilder?"} +{"id": "136", "text": "What is the difference between withPBKDF2() and withCompressedPBKDF2()?"} +{"id": "137", "text": "How do you create an instance of MessageDigestFunction with a specific hashing algorithm and salt option?"} +{"id": "138", "text": "How do you hash a password using MessageDigestFunction?"} +{"id": "139", "text": "How do you check if a password matches a hashed value using MessageDigestFunction?"} +{"id": "140", "text": "How can you retrieve the hashing algorithm and salt option used by a MessageDigestFunction instance?"} +{"id": "141", "text": "What does the test method `issue92()` do?"} +{"id": "142", "text": "What does the test method `issue99()` do?"} +{"id": "143", "text": "What does the test method `issue93()` do?"} +{"id": "144", "text": "What does the test method `issue120()` do?"} +{"id": "145", "text": "Which package does the Argon2 enum belong to?"} +{"id": "146", "text": "What is the purpose of the `Tag` class?"} +{"id": "147", "text": "What is the purpose of the `pull` function in the `Tag` class?"} +{"id": "148", "text": "How does the Serializer class serialize branch control instructions like br and br_if?"} +{"id": "149", "text": "What is the purpose of the `Log` class?"} +{"id": "150", "text": "What is the expected behavior of the `body` method of the `Log` class?"} +{"id": "151", "text": "What is the purpose of the `procRaise` function in the `Environ` class?"} +{"id": "152", "text": "How does the `procRaise` function handle unsupported signals?"} +{"id": "153", "text": "What is the purpose 
of the `printPluginMock` function?"} +{"id": "154", "text": "What is the purpose of the `body` method in the mock classes?"} +{"id": "155", "text": "How do you access the magic bytes of a Module?"} +{"id": "156", "text": "What sections are contained in a Module?"} +{"id": "157", "text": "How do you check if a Module has been validated?"} +{"id": "158", "text": "How can I retrieve the current log level using the provided classes?"} +{"id": "159", "text": "How can I set an integer option value using the provided classes?"} +{"id": "160", "text": "What is the purpose of the AVRescaleQ class?"} +{"id": "161", "text": "What is the purpose of the WasmEdge::PO namespace?"} +{"id": "162", "text": "What error codes are defined in the ErrCode enum?"} +{"id": "163", "text": "What data does the Error class store?"} +{"id": "164", "text": "How can I construct an Error object?"} +{"id": "165", "text": "How can I access the error code and message of an Error object?"} +{"id": "166", "text": "How can I set the ID of a specific chapter using the provided functions?"} +{"id": "167", "text": "How can I set the time base of a specific chapter?"} +{"id": "168", "text": "What does the AVChapterStart function do?"} +{"id": "169", "text": "What is the purpose of the `importPk` function?"} +{"id": "170", "text": "How does the `pkExportData` function work?"} +{"id": "171", "text": "How do you create a new instance of the `Pty` struct?"} +{"id": "172", "text": "How can you access the child process watcher associated with a `Pty` instance?"} +{"id": "173", "text": "What does the `merge` function do?"} +{"id": "174", "text": "How does the `merge_tables` function work?"} +{"id": "175", "text": "What does the `merge_sequence` test verify?"} +{"id": "176", "text": "How does the `get_pw_entry` function work?"} +{"id": "177", "text": "What fields does the `Mouse` struct contain?"} +{"id": "178", "text": "How are the default mouse bindings obtained in the `default` implementation of `MouseBindings`?"} +{"id": "179", "text": "How is deserialization handled for the `MouseBindings` struct?"} +{"id": "180", "text": "What is the purpose of the `MouseBinding` type?"} +{"id": "181", "text": "What is the purpose of the `new_nop` method?"} +{"id": "182", "text": "How does the Scheduler schedule a new event?"} +{"id": "183", "text": "How can you cancel a scheduled event?"} +{"id": "184", "text": "How can you check if a timer is already scheduled?"} +{"id": "185", "text": "What does the `attach_handler` function do?"} +{"id": "186", "text": "How do you create a new `Row` instance?"} +{"id": "187", "text": "What does the `grow` method do in the `Row` struct?"} +{"id": "188", "text": "How does the `shrink` method work in the `Row` struct?"} +{"id": "189", "text": "What is the purpose of the `Debug` struct?"} +{"id": "190", "text": "How can I specify the renderer preference in Alacritty?"} +{"id": "191", "text": "What is the purpose of the Display struct?"} +{"id": "192", "text": "How does the Display struct handle configuration updates?"} +{"id": "193", "text": "What is the purpose of the FrameTimer struct?"} +{"id": "194", "text": "What class does HasWeightCol extend?"} +{"id": "195", "text": "What is the name and description of the weight column parameter?"} +{"id": "196", "text": "How can I get the value of the weight column parameter?"} +{"id": "197", "text": "What is the default value of the weight column parameter?"} +{"id": "198", "text": "What parameters does the VectorSlicer class have?"} +{"id": "199", "text": "How do I 
specify the indices to slice from the input vector?"} +{"id": "200", "text": "What restrictions are there on the indices parameter?"} +{"id": "201", "text": "How does an operator or UDF get access to the current epoch number?"} +{"id": "202", "text": "Are the vector and l2Norm fields mutable in the VectorWithNorm class?"} +{"id": "203", "text": "How do you set the parameters of the UnivariateFeatureSelector?"} +{"id": "204", "text": "What happens if you don't set the feature_type or label_type parameters of the UnivariateFeatureSelector?"} +{"id": "205", "text": "What is the output schema of the UnivariateFeatureSelectorModel after transforming data?"} +{"id": "206", "text": "How does the testParam() method verify the parameter settings of LinearRegression?"} +{"id": "207", "text": "What does the testInputTypeConversion() method verify?"} +{"id": "208", "text": "What does the testRegularization() method check?"} +{"id": "209", "text": "What is the maximum size of a segment that the MemorySegmentWriter will write?"} +{"id": "210", "text": "What is the purpose of the HeadOperatorCoordinator class?"} +{"id": "211", "text": "How does the HeadOperatorCoordinator determine when to send out a GloballyAlignedEvent?"} +{"id": "212", "text": "How is the input data generated in this example?"} +{"id": "213", "text": "How are the results extracted and displayed in this example?"} +{"id": "214", "text": "What execution environment is used in this example?"} +{"id": "215", "text": "How do you create an IndexToStringModel instance?"} +{"id": "216", "text": "How do you set the model data for the IndexToStringModel?"} +{"id": "217", "text": "How do you extract and display the results after applying the IndexToStringModel?"} +{"id": "218", "text": "How do you create a new instance of the ColorEndPatternConverter?"} +{"id": "219", "text": "What parameters does the ColorEndPatternConverter constructor take?"} +{"id": "220", "text": "What namespaces are used in this file?"} +{"id": "221", "text": "What does the XMLFilenameFilter constructor do?"} +{"id": "222", "text": "What is the purpose of the pattern and replacement strings constructed in the XMLFilenameFilter constructor?"} +{"id": "223", "text": "How would the XMLFilenameFilter be used?"} +{"id": "224", "text": "What C++ standard library classes are used in this code?"} +{"id": "225", "text": "How does the testTrigger unit test work?"} +{"id": "226", "text": "How does the testValid unit test work?"} +{"id": "227", "text": "How do you configure the logging system with a specific layout using BasicConfigurator?"} +{"id": "228", "text": "How do you configure the logging system with a specific appender using BasicConfigurator?"} +{"id": "229", "text": "How do you reset the logging configuration to its default state using BasicConfigurator?"} +{"id": "230", "text": "What is the default layout used by BasicConfigurator if no layout is provided?"} +{"id": "231", "text": "What is the purpose of the WideLife template class used in the code?"} +{"id": "232", "text": "How does the hexdump function handle different character types for logging?"} +{"id": "233", "text": "How do you create a DenyAllFilter instance?"} +{"id": "234", "text": "What do you need to do after creating a DenyAllFilter instance?"} +{"id": "235", "text": "What is the signature of the decide() method of the DenyAllFilter?"} +{"id": "236", "text": "What namespaces are used in this file?"} +{"id": "237", "text": "What is the purpose of the MinimumTestCase class?"} +{"id": "238", "text": "What is 
the purpose of the common() method?"} +{"id": "239", "text": "How do you obtain an instance of NDCPatternConverter?"} +{"id": "240", "text": "How does the NDCPatternConverter format the logging event?"} +{"id": "241", "text": "What is the inheritance hierarchy of NDCPatternConverter?"} +{"id": "242", "text": "What macros are used in the NDCPatternConverter class declaration?"} +{"id": "243", "text": "How does the test1() method test the FMTLayout?"} +{"id": "244", "text": "What is the purpose of the common() method?"} +{"id": "245", "text": "How do you construct a BufferedWriter object?"} +{"id": "246", "text": "What methods does BufferedWriter override from its parent class?"} +{"id": "247", "text": "How does BufferedWriter handle object destruction?"} diff --git a/experiments/data/contextual-embeddings/original_data/codebase_chunks.json b/experiments/data/contextual-embeddings/original_data/codebase_chunks.json new file mode 100644 index 0000000..af149f4 --- /dev/null +++ b/experiments/data/contextual-embeddings/original_data/codebase_chunks.json @@ -0,0 +1,4317 @@ +[ + { + "doc_id": "doc_1", + "original_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", + "content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\nimpl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n /// Retrieve the primary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn primary(&mut self) -> &mut A {\n &mut self.primary\n }\n\n /// Retrieve the secondary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn secondary(&mut self) -> &mut B {\n &mut self.secondary\n }\n}\n\nimpl Executor for DiffExecutor\nwhere\n A: Executor + HasObservers,\n B: Executor + HasObservers,\n EM: UsesState,\n DOT: DifferentialObserversTuple,\n Z: UsesState,\n{\n fn run_target(\n &mut self,\n fuzzer: &mut Z,\n state: &mut Self::State,\n mgr: &mut EM,\n input: &Self::Input,\n ) -> Result {\n self.observers(); // update in advance\n let observers = self.observers.get_mut();\n observers\n .differential\n .pre_observe_first_all(observers.primary.as_mut())?;\n observers.primary.as_mut().pre_exec_all(state, input)?;\n let ret1 = self.primary.run_target(fuzzer, state, mgr, input)?;\n observers\n .primary\n .as_mut()\n .post_exec_all(state, input, &ret1)?;\n observers\n .differential\n 
.post_observe_first_all(observers.primary.as_mut())?;\n observers\n .differential\n .pre_observe_second_all(observers.secondary.as_mut())?;\n observers.secondary.as_mut().pre_exec_all(state, input)?;\n let ret2 = self.secondary.run_target(fuzzer, state, mgr, input)?;\n observers\n .secondary\n .as_mut()\n .post_exec_all(state, input, &ret2)?;\n observers\n .differential\n .post_observe_second_all(observers.secondary.as_mut())?;\n if ret1 == ret2 {\n Ok(ret1)\n } else {\n // We found a diff in the exit codes!\n Ok(ExitKind::Diff {\n primary: ret1.into(),\n secondary: ret2.into(),\n })\n }\n }\n}\n\n/// Proxy the observers of the inner executors\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(\n bound = \"A: serde::Serialize + serde::de::DeserializeOwned, B: serde::Serialize + serde::de::DeserializeOwned, DOT: serde::Serialize + serde::de::DeserializeOwned\"\n)]\npub struct ProxyObserversTuple {\n primary: OwnedMutPtr,\n secondary: OwnedMutPtr,\n differential: DOT,\n}\n\nimpl ObserversTuple for ProxyObserversTuple\nwhere\n A: ObserversTuple,\n B: ObserversTuple,\n DOT: DifferentialObserversTuple,\n S: UsesInput,\n{\n fn pre_exec_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_all(state, input)\n }\n\n fn post_exec_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential.post_exec_all(state, input, exit_kind)\n }\n\n fn pre_exec_child_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_child_all(state, input)\n }\n\n fn post_exec_child_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential\n .post_exec_child_all(state, input, exit_kind)\n }\n\n /// Returns true if a `stdout` observer was added to the list\n #[inline]\n fn observes_stdout(&self) -> bool {\n self.primary.as_ref().observes_stdout() || self.secondary.as_ref().observes_stdout()\n }\n /// Returns true if a `stderr` observer was added to the list\n #[inline]\n fn observes_stderr(&self) -> bool {\n self.primary.as_ref().observes_stderr() || self.secondary.as_ref().observes_stderr()\n }\n\n /// Runs `observe_stdout` for all stdout observers in the list\n fn observe_stdout(&mut self, stdout: &[u8]) {\n self.primary.as_mut().observe_stderr(stdout);\n self.secondary.as_mut().observe_stderr(stdout);\n }\n\n /// Runs `observe_stderr` for all stderr observers in the list\n fn observe_stderr(&mut self, stderr: &[u8]) {\n self.primary.as_mut().observe_stderr(stderr);\n self.secondary.as_mut().observe_stderr(stderr);\n }\n}\n\nimpl MatchName for ProxyObserversTuple\nwhere\n A: MatchName,\n B: MatchName,\n DOT: MatchName,\n{\n fn match_name(&self, name: &str) -> Option<&T> {\n if let Some(t) = self.primary.as_ref().match_name::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_ref().match_name::(name) {\n Some(t)\n } else {\n self.differential.match_name::(name)\n }\n }\n fn match_name_mut(&mut self, name: &str) -> Option<&mut T> {\n if let Some(t) = self.primary.as_mut().match_name_mut::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_mut().match_name_mut::(name) {\n Some(t)\n } else {\n self.differential.match_name_mut::(name)\n }\n }\n}\n\nimpl ProxyObserversTuple {\n fn set(&mut self, primary: &A, secondary: &B) {\n self.primary = OwnedMutPtr::Ptr(ptr::from_ref(primary) as *mut A);\n self.secondary = OwnedMutPtr::Ptr(ptr::from_ref(secondary) as *mut B);\n }\n}\n\nimpl 
UsesObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n type Observers = ProxyObserversTuple;\n}\n\nimpl UsesState for DiffExecutor\nwhere\n A: UsesState,\n B: UsesState,\n{\n type State = A::State;\n}\n\nimpl HasObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n #[inline]\n fn observers(&self) -> &ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_ref().unwrap()\n }\n }\n\n #[inline]\n fn observers_mut(&mut self) -> &mut ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_mut().unwrap()\n }\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_1_chunk_0", + "original_index": 0, + "content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\n" + }, + { + "chunk_id": "doc_1_chunk_1", + "original_index": 1, + "content": "impl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n" + }, + { + "chunk_id": "doc_1_chunk_2", + "original_index": 2, + "content": " /// Retrieve the primary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn primary(&mut self) -> &mut A {\n &mut self.primary\n }\n\n /// Retrieve the secondary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn secondary(&mut self) -> &mut B {\n &mut self.secondary\n }\n}\n\nimpl Executor for DiffExecutor\nwhere\n A: Executor + HasObservers,\n B: Executor + HasObservers,\n EM: UsesState,\n DOT: DifferentialObserversTuple,\n Z: UsesState,\n{\n fn run_target(\n &mut self,\n fuzzer: &mut Z,\n state: &mut Self::State,\n mgr: &mut EM,\n input: &Self::Input,\n ) -> Result {\n self.observers(); // update in advance\n let observers = self.observers.get_mut();\n observers\n .differential\n" + }, + { + "chunk_id": "doc_1_chunk_3", + "original_index": 3, + "content": " .pre_observe_first_all(observers.primary.as_mut())?;\n observers.primary.as_mut().pre_exec_all(state, input)?;\n let ret1 = self.primary.run_target(fuzzer, state, mgr, input)?;\n observers\n .primary\n .as_mut()\n .post_exec_all(state, 
input, &ret1)?;\n observers\n .differential\n .post_observe_first_all(observers.primary.as_mut())?;\n observers\n .differential\n" + }, + { + "chunk_id": "doc_1_chunk_4", + "original_index": 4, + "content": " .pre_observe_second_all(observers.secondary.as_mut())?;\n observers.secondary.as_mut().pre_exec_all(state, input)?;\n let ret2 = self.secondary.run_target(fuzzer, state, mgr, input)?;\n observers\n .secondary\n .as_mut()\n .post_exec_all(state, input, &ret2)?;\n observers\n .differential\n .post_observe_second_all(observers.secondary.as_mut())?;\n if ret1 == ret2 {\n Ok(ret1)\n } else {\n // We found a diff in the exit codes!\n Ok(ExitKind::Diff {\n primary: ret1.into(),\n secondary: ret2.into(),\n })\n }\n }\n}\n\n" + }, + { + "chunk_id": "doc_1_chunk_5", + "original_index": 5, + "content": "/// Proxy the observers of the inner executors\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(\n bound = \"A: serde::Serialize + serde::de::DeserializeOwned, B: serde::Serialize + serde::de::DeserializeOwned, DOT: serde::Serialize + serde::de::DeserializeOwned\"\n)]\npub struct ProxyObserversTuple {\n primary: OwnedMutPtr,\n secondary: OwnedMutPtr,\n differential: DOT,\n}\n\nimpl ObserversTuple for ProxyObserversTuple\nwhere\n A: ObserversTuple,\n B: ObserversTuple,\n DOT: DifferentialObserversTuple,\n S: UsesInput,\n{\n fn pre_exec_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_all(state, input)\n }\n\n" + }, + { + "chunk_id": "doc_1_chunk_6", + "original_index": 6, + "content": " fn post_exec_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential.post_exec_all(state, input, exit_kind)\n }\n\n fn pre_exec_child_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_child_all(state, input)\n }\n\n fn post_exec_child_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential\n .post_exec_child_all(state, input, exit_kind)\n }\n\n" + }, + { + "chunk_id": "doc_1_chunk_7", + "original_index": 7, + "content": " /// Returns true if a `stdout` observer was added to the list\n #[inline]\n fn observes_stdout(&self) -> bool {\n self.primary.as_ref().observes_stdout() || self.secondary.as_ref().observes_stdout()\n }\n /// Returns true if a `stderr` observer was added to the list\n #[inline]\n fn observes_stderr(&self) -> bool {\n self.primary.as_ref().observes_stderr() || self.secondary.as_ref().observes_stderr()\n }\n\n" + }, + { + "chunk_id": "doc_1_chunk_8", + "original_index": 8, + "content": " /// Runs `observe_stdout` for all stdout observers in the list\n fn observe_stdout(&mut self, stdout: &[u8]) {\n self.primary.as_mut().observe_stderr(stdout);\n self.secondary.as_mut().observe_stderr(stdout);\n }\n\n /// Runs `observe_stderr` for all stderr observers in the list\n fn observe_stderr(&mut self, stderr: &[u8]) {\n self.primary.as_mut().observe_stderr(stderr);\n self.secondary.as_mut().observe_stderr(stderr);\n }\n}\n\n" + }, + { + "chunk_id": "doc_1_chunk_9", + "original_index": 9, + "content": "impl MatchName for ProxyObserversTuple\nwhere\n A: MatchName,\n B: MatchName,\n DOT: MatchName,\n{\n fn match_name(&self, name: &str) -> Option<&T> {\n if let Some(t) = self.primary.as_ref().match_name::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_ref().match_name::(name) {\n Some(t)\n } else {\n self.differential.match_name::(name)\n }\n }\n fn 
match_name_mut(&mut self, name: &str) -> Option<&mut T> {\n if let Some(t) = self.primary.as_mut().match_name_mut::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_mut().match_name_mut::(name) {\n Some(t)\n } else {\n self.differential.match_name_mut::(name)\n }\n }\n}\n\n" + }, + { + "chunk_id": "doc_1_chunk_10", + "original_index": 10, + "content": "impl ProxyObserversTuple {\n fn set(&mut self, primary: &A, secondary: &B) {\n self.primary = OwnedMutPtr::Ptr(ptr::from_ref(primary) as *mut A);\n self.secondary = OwnedMutPtr::Ptr(ptr::from_ref(secondary) as *mut B);\n }\n}\n\nimpl UsesObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n type Observers = ProxyObserversTuple;\n}\n\nimpl UsesState for DiffExecutor\nwhere\n A: UsesState,\n B: UsesState,\n{\n type State = A::State;\n}\n\n" + }, + { + "chunk_id": "doc_1_chunk_11", + "original_index": 11, + "content": "impl HasObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n #[inline]\n fn observers(&self) -> &ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_ref().unwrap()\n }\n }\n\n" + }, + { + "chunk_id": "doc_1_chunk_12", + "original_index": 12, + "content": " #[inline]\n fn observers_mut(&mut self) -> &mut ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_mut().unwrap()\n }\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_2", + "original_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", + "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut 
u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", + "chunks": [ + { + "chunk_id": "doc_2_chunk_0", + "original_index": 0, + "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse 
libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n" + }, + { + "chunk_id": "doc_2_chunk_1", + "original_index": 1, + "content": "/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n" + }, + { + "chunk_id": "doc_2_chunk_2", + "original_index": 2, + "content": " #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n" + }, + { + "chunk_id": "doc_2_chunk_3", + "original_index": 3, + "content": " // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n" + }, + { + "chunk_id": "doc_2_chunk_4", + "original_index": 4, + "content": " // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n" + }, + { + "chunk_id": "doc_2_chunk_5", + "original_index": 5, + "content": " // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n" + }, + { + "chunk_id": "doc_2_chunk_6", + 
"original_index": 6, + "content": " // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n" + } + ] + }, + { + "doc_id": "doc_3", + "original_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", + "content": "use core::{ffi::c_void, fmt::Debug};\nuse std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};\n\nuse libafl::{\n events::EventFirer,\n executors::ExitKind,\n feedbacks::Feedback,\n inputs::UsesInput,\n observers::{Observer, ObserversTuple},\n state::State,\n Error,\n};\nuse libafl_bolts::Named;\nuse libc::SIGABRT;\nuse serde::{Deserialize, Serialize};\n\nextern \"C\" {\n fn libafl_check_malloc_size(ptr: *const c_void) -> usize;\n}\n\nstatic RUNNING: AtomicBool = AtomicBool::new(false);\nstatic OOMED: AtomicBool = AtomicBool::new(false);\nstatic RSS_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n// 2GB, which is the default\nstatic MALLOC_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n\nstatic MALLOC_SIZE: AtomicUsize = AtomicUsize::new(0);\n\n/// malloc hook which will be invoked if address sanitizer is present. Used to detect if the target makes a malloc call\n/// that will exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid freshly allocated pointers backed by allocations of `size`.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_malloc_hook(ptr: *const c_void, size: usize) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = match unsafe { libafl_check_malloc_size(ptr) } {\n 0 => size, // either the malloc size function didn't work or it's really zero-sized\n real => real,\n };\n\n let total = MALLOC_SIZE.fetch_add(size, Ordering::Relaxed) + size;\n if (size > MALLOC_MAX.load(Ordering::Relaxed) || total > RSS_MAX.load(Ordering::Relaxed))\n && !OOMED.swap(true, Ordering::Relaxed)\n {\n unsafe {\n // we need to kill the process in a way that immediately triggers the crash handler\n libc::raise(SIGABRT);\n }\n }\n }\n}\n\n/// free hook which will be invoked if ASAN is present. 
Used to detect if the target makes a malloc call that will\n/// exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid allocated pointers, about to be freed.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_free_hook(ptr: *const c_void) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = unsafe { libafl_check_malloc_size(ptr) };\n MALLOC_SIZE\n .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |existing| {\n Some(existing.saturating_sub(size))\n })\n .expect(\"must complete successfully\");\n }\n}\n\nconst OOM_OBS_NAME: &str = \"libfuzzer-like-oom\";\n\n/// Observer which detects if the target would run out of memory or otherwise violate the permissible usage of malloc\n#[derive(Debug, Serialize, Deserialize)]\npub struct OomObserver {\n oomed: bool,\n}\n\nimpl OomObserver {\n /// Create a [`OomObserver`] with the provided `rss_max` (total heap size) and `malloc_max` (largest permissible malloc\n /// allocation size)\n pub fn new(rss_max: usize, malloc_max: usize) -> Self {\n RSS_MAX.store(rss_max, Ordering::Relaxed);\n MALLOC_MAX.store(malloc_max, Ordering::Relaxed);\n Self { oomed: false }\n }\n}\n\nimpl Named for OomObserver {\n // strictly one name to prevent two from being registered\n fn name(&self) -> &str {\n OOM_OBS_NAME\n }\n}\n\nimpl<S> Observer<S> for OomObserver\nwhere\n S: UsesInput,\n{\n fn pre_exec(&mut self, _state: &mut S, _input: &S::Input) -> Result<(), Error> {\n OOMED.store(false, Ordering::Relaxed);\n // must reset for platforms which do not offer malloc tracking\n MALLOC_SIZE.store(0, Ordering::Relaxed);\n RUNNING.store(true, Ordering::Relaxed);\n Ok(())\n }\n\n fn post_exec(\n &mut self,\n _state: &mut S,\n _input: &S::Input,\n _exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n RUNNING.store(false, Ordering::Relaxed);\n self.oomed = OOMED.load(Ordering::Relaxed);\n Ok(())\n }\n\n fn pre_exec_child(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.pre_exec(state, input)\n }\n\n fn post_exec_child(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.post_exec(state, input, exit_kind)\n }\n}\n\n/// Feedback for the similarly named [`OomObserver`] to detect if the target crashed due to an observed OOM\n#[derive(Debug, Serialize, Deserialize, Copy, Clone, Default)]\npub struct OomFeedback;\n\nimpl OomFeedback {\n /// Whether the target OOM'd in the last execution\n pub fn oomed() -> bool {\n OOMED.load(Ordering::Relaxed)\n }\n}\n\nimpl Named for OomFeedback {\n fn name(&self) -> &str {\n \"oom\"\n }\n}\n\nimpl<S> Feedback<S> for OomFeedback\nwhere\n S: State,\n{\n fn is_interesting<EM, OT>(\n &mut self,\n _state: &mut S,\n _manager: &mut EM,\n _input: &S::Input,\n _observers: &OT,\n _exit_kind: &ExitKind,\n ) -> Result<bool, Error>\n where\n EM: EventFirer<State = S>,\n OT: ObserversTuple<S>,\n {\n Ok(Self::oomed())\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_3_chunk_0", + "original_index": 0, + "content": "use core::{ffi::c_void, fmt::Debug};\nuse std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};\n\nuse libafl::{\n events::EventFirer,\n executors::ExitKind,\n feedbacks::Feedback,\n inputs::UsesInput,\n observers::{Observer, ObserversTuple},\n state::State,\n Error,\n};\nuse libafl_bolts::Named;\nuse libc::SIGABRT;\nuse serde::{Deserialize, Serialize};\n\nextern \"C\" {\n fn libafl_check_malloc_size(ptr: *const c_void) -> usize;\n}\n\nstatic RUNNING: AtomicBool = AtomicBool::new(false);\nstatic OOMED: AtomicBool = AtomicBool::new(false);\nstatic RSS_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n// 2GB, which is the default\nstatic MALLOC_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n\nstatic MALLOC_SIZE: AtomicUsize = AtomicUsize::new(0);\n\n" + }, + { + "chunk_id": "doc_3_chunk_1", + "original_index": 1, + "content": "/// malloc hook which will be invoked if address sanitizer is present. Used to detect if the target makes a malloc call\n/// that will exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid freshly allocated pointers backed by allocations of `size`.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_malloc_hook(ptr: *const c_void, size: usize) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = match unsafe { libafl_check_malloc_size(ptr) } {\n 0 => size, // either the malloc size function didn't work or it's really zero-sized\n real => real,\n };\n\n" + }, + { + "chunk_id": "doc_3_chunk_2", + "original_index": 2, + "content": " let total = MALLOC_SIZE.fetch_add(size, Ordering::Relaxed) + size;\n if (size > MALLOC_MAX.load(Ordering::Relaxed) || total > RSS_MAX.load(Ordering::Relaxed))\n && !OOMED.swap(true, Ordering::Relaxed)\n {\n unsafe {\n // we need to kill the process in a way that immediately triggers the crash handler\n libc::raise(SIGABRT);\n }\n }\n }\n}\n\n" + }, + { + "chunk_id": "doc_3_chunk_3", + "original_index": 3, + "content": "/// free hook which will be invoked if ASAN is present. Used to detect if the target makes a malloc call that will\n/// exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid allocated pointers, about to be freed.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_free_hook(ptr: *const c_void) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = unsafe { libafl_check_malloc_size(ptr) };\n MALLOC_SIZE\n .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |existing| {\n Some(existing.saturating_sub(size))\n })\n .expect(\"must complete successfully\");\n }\n}\n\n" + }, + { + "chunk_id": "doc_3_chunk_4", + "original_index": 4, + "content": "const OOM_OBS_NAME: &str = \"libfuzzer-like-oom\";\n\n/// Observer which detects if the target would run out of memory or otherwise violate the permissible usage of malloc\n#[derive(Debug, Serialize, Deserialize)]\npub struct OomObserver {\n oomed: bool,\n}\n\nimpl OomObserver {\n /// Create a [`OomObserver`] with the provided `rss_max` (total heap size) and `malloc_max` (largest permissible malloc\n /// allocation size)\n pub fn new(rss_max: usize, malloc_max: usize) -> Self {\n RSS_MAX.store(rss_max, Ordering::Relaxed);\n MALLOC_MAX.store(malloc_max, Ordering::Relaxed);\n Self { oomed: false }\n }\n}\n\nimpl Named for OomObserver {\n // strictly one name to prevent two from being registered\n fn name(&self) -> &str {\n OOM_OBS_NAME\n }\n}\n\n" + }, + { + "chunk_id": "doc_3_chunk_5", + "original_index": 5, + "content": "impl<S> Observer<S> for OomObserver\nwhere\n S: UsesInput,\n{\n fn pre_exec(&mut self, _state: &mut S, _input: &S::Input) -> Result<(), Error> {\n OOMED.store(false, Ordering::Relaxed);\n // must reset for platforms which do not offer malloc tracking\n MALLOC_SIZE.store(0, Ordering::Relaxed);\n RUNNING.store(true, Ordering::Relaxed);\n Ok(())\n }\n\n fn post_exec(\n &mut self,\n _state: &mut S,\n _input: &S::Input,\n _exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n RUNNING.store(false, Ordering::Relaxed);\n self.oomed = OOMED.load(Ordering::Relaxed);\n Ok(())\n }\n\n fn pre_exec_child(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.pre_exec(state, input)\n }\n\n fn post_exec_child(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.post_exec(state, input, exit_kind)\n }\n}\n\n" + }, + { + "chunk_id": "doc_3_chunk_6", + "original_index": 6, + "content": "/// Feedback for the similarly named [`OomObserver`] to detect if the target crashed due to an observed OOM\n#[derive(Debug, Serialize, Deserialize, Copy, Clone, Default)]\npub struct OomFeedback;\n\nimpl OomFeedback {\n /// Whether the target OOM'd in the last execution\n pub fn oomed() -> bool {\n OOMED.load(Ordering::Relaxed)\n }\n}\n\nimpl Named for OomFeedback {\n fn name(&self) -> &str {\n \"oom\"\n }\n}\n\nimpl<S> Feedback<S> for OomFeedback\nwhere\n S: State,\n{\n fn is_interesting<EM, OT>(\n &mut self,\n _state: &mut S,\n _manager: &mut EM,\n _input: &S::Input,\n _observers: &OT,\n _exit_kind: &ExitKind,\n ) -> Result<bool, Error>\n where\n EM: EventFirer<State = S>,\n OT: ObserversTuple<S>,\n {\n Ok(Self::oomed())\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_4", + "original_uuid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", + "content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}", + "chunks": [ + { + "chunk_id": "doc_4_chunk_0", + "original_index": 0, + "content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}" + } + ] + }, + { + "doc_id": "doc_5", + "original_uuid": "0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", + "content": "#include <stdio.h>\n#include <string.h>\n#include <stdlib.h>\n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}", + "chunks": [ + { + "chunk_id": "doc_5_chunk_0", + "original_index": 0, + "content": "#include <stdio.h>\n#include <string.h>\n#include <stdlib.h>\n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}" + } + ] + }, + { + "doc_id": "doc_6", + "original_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", + "content": "use std::{\n collections::{BTreeSet, HashMap},\n marker::PhantomData,\n};\n\nuse libafl::{\n corpus::{Corpus, CorpusId, Testcase},\n feedbacks::MapNoveltiesMetadata,\n inputs::UsesInput,\n schedulers::{RemovableScheduler, Scheduler},\n state::{HasCorpus, HasMetadata, State, UsesState},\n Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n mapping: HashMap<usize, CorpusId>,\n all: BTreeSet<CorpusId>,\n phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n S: State,\n{\n type State = S;\n}\n\nimpl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n S: State + HasCorpus,\n{\n fn on_remove(\n &mut self,\n _state: &mut Self::State,\n idx: CorpusId,\n _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n ) -> Result<(), Error> {\n self.all.remove(&idx);\n Ok(())\n }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n S: State + HasCorpus,\n{\n fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n self.all.insert(idx);\n let testcase = state.corpus().get(idx)?.borrow();\n let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n for cov_idx in &meta.list {\n self.mapping.insert(*cov_idx, idx);\n }\n Ok(())\n }\n\n fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n unimplemented!(\"Not suitable for actual scheduling.\");\n }\n}\n\nimpl<S> MergeScheduler<S> {\n pub fn new() -> Self {\n Self {\n mapping: HashMap::default(),\n all: BTreeSet::default(),\n phantom: PhantomData,\n }\n }\n\n pub fn removable(&self) -> BTreeSet<CorpusId> {\n self.all\n .difference(&self.mapping.values().copied().collect())\n .copied()\n .collect()\n }\n\n pub fn current(&self) -> &BTreeSet<CorpusId> {\n &self.all\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_6_chunk_0", + "original_index": 0, + "content": "use std::{\n collections::{BTreeSet, HashMap},\n marker::PhantomData,\n};\n\nuse libafl::{\n corpus::{Corpus, CorpusId, Testcase},\n feedbacks::MapNoveltiesMetadata,\n inputs::UsesInput,\n schedulers::{RemovableScheduler, Scheduler},\n state::{HasCorpus, HasMetadata, State, UsesState},\n Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n mapping: HashMap<usize, CorpusId>,\n all: BTreeSet<CorpusId>,\n phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n S: State,\n{\n type State = S;\n}\n\n" + }, + { + "chunk_id": "doc_6_chunk_1", + "original_index": 1, + "content": "impl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n S: State + HasCorpus,\n{\n fn on_remove(\n &mut self,\n _state: &mut Self::State,\n idx: CorpusId,\n _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n ) -> Result<(), Error> {\n self.all.remove(&idx);\n Ok(())\n }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n S: State + HasCorpus,\n{\n fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n self.all.insert(idx);\n let testcase = state.corpus().get(idx)?.borrow();\n let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n for cov_idx in &meta.list {\n self.mapping.insert(*cov_idx, idx);\n }\n Ok(())\n }\n\n" + }, + { + "chunk_id": "doc_6_chunk_2", + "original_index": 2, + "content": " fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n unimplemented!(\"Not suitable for actual scheduling.\");\n }\n}\n\nimpl<S> MergeScheduler<S> {\n pub fn new() -> Self {\n Self {\n mapping: HashMap::default(),\n all: BTreeSet::default(),\n phantom: PhantomData,\n }\n }\n\n pub fn removable(&self) -> BTreeSet<CorpusId> {\n self.all\n .difference(&self.mapping.values().copied().collect())\n .copied()\n .collect()\n }\n\n pub fn current(&self) -> &BTreeSet<CorpusId> {\n &self.all\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_7", + "original_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", + "content": "use std::sync::OnceLock;\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::aarch64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n X0 = 0,\n X1 = 1,\n X2 = 2,\n X3 = 3,\n X4 = 4,\n X5 = 5,\n X6 = 6,\n X7 = 7,\n X8 = 8,\n X9 = 9,\n X10 = 10,\n X11 = 11,\n X12 = 12,\n X13 = 13,\n X14 = 14,\n X15 = 15,\n X16 = 16,\n X17 = 17,\n X18 = 18,\n X19 = 19,\n X20 = 20,\n X21 = 21,\n X22 = 22,\n X23 = 23,\n X24 = 24,\n X25 = 25,\n X26 = 26,\n X27 = 27,\n X28 = 28,\n X29 = 29,\n X30 = 30,\n Sp = 31,\n Pc = 32,\n Pstate = 33,\n}\n\nstatic BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\npub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n BACKDOOR_ARCH_REGS.get_or_init(|| {\n enum_map! {\n BackdoorArgs::Ret => Regs::X0,\n BackdoorArgs::Cmd => Regs::X0,\n BackdoorArgs::Arg1 => Regs::X1,\n BackdoorArgs::Arg2 => Regs::X2,\n BackdoorArgs::Arg3 => Regs::X3,\n BackdoorArgs::Arg4 => Regs::X4,\n BackdoorArgs::Arg5 => Regs::X5,\n BackdoorArgs::Arg6 => Regs::X6,\n }\n })\n}\n\n/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n pub const Fp: Regs = Regs::X29;\n pub const Lr: Regs = Regs::X30;\n}\n\n/// Return an ARM64 ArchCapstoneBuilder\npub fn capstone() -> capstone::arch::arm64::ArchCapstoneBuilder {\n capstone::Capstone::new()\n .arm64()\n .mode(capstone::arch::arm64::ArchMode::Arm)\n}\n\npub type GuestReg = u64;\n\nimpl crate::ArchExtras for crate::CPU {\n fn read_return_address<T>(&self) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n self.read_reg(Regs::Lr)\n }\n\n fn write_return_address<T>(&self, val: T) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n self.write_reg(Regs::Lr, val)\n }\n\n fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let reg_id = match idx {\n 0 => Regs::X0,\n 1 => Regs::X1,\n 2 => Regs::X2,\n 3 => Regs::X3,\n 4 => Regs::X4,\n 5 => Regs::X5,\n r => return Err(format!(\"Unsupported argument: {r:}\")),\n };\n\n self.read_reg(reg_id)\n }\n\n fn write_function_argument<T>(\n &self,\n conv: CallingConvention,\n idx: i32,\n val: T,\n ) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let val: GuestReg = val.into();\n match idx {\n 0 => self.write_reg(Regs::X0, val),\n 1 => self.write_reg(Regs::X1, val),\n _ => Err(format!(\"Unsupported argument: {idx:}\")),\n }\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_7_chunk_0", + "original_index": 0, + "content": "use std::sync::OnceLock;\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::aarch64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n X0 = 0,\n X1 = 1,\n X2 = 2,\n X3 = 3,\n X4 = 4,\n X5 = 5,\n X6 = 6,\n X7 = 7,\n X8 = 8,\n X9 = 9,\n X10 = 10,\n X11 = 11,\n X12 = 12,\n X13 = 13,\n X14 = 14,\n X15 = 15,\n X16 = 16,\n X17 = 17,\n X18 = 18,\n X19 = 19,\n X20 = 20,\n X21 = 21,\n X22 = 22,\n X23 = 23,\n X24 = 24,\n X25 = 25,\n X26 = 26,\n X27 = 27,\n X28 = 28,\n X29 = 29,\n X30 = 30,\n Sp = 31,\n Pc = 32,\n Pstate = 33,\n}\n\n" + }, + { + "chunk_id": "doc_7_chunk_1", + "original_index": 1, + "content": "static BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\npub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n BACKDOOR_ARCH_REGS.get_or_init(|| {\n enum_map! {\n BackdoorArgs::Ret => Regs::X0,\n BackdoorArgs::Cmd => Regs::X0,\n BackdoorArgs::Arg1 => Regs::X1,\n BackdoorArgs::Arg2 => Regs::X2,\n BackdoorArgs::Arg3 => Regs::X3,\n BackdoorArgs::Arg4 => Regs::X4,\n BackdoorArgs::Arg5 => Regs::X5,\n BackdoorArgs::Arg6 => Regs::X6,\n }\n })\n}\n\n" + }, + { + "chunk_id": "doc_7_chunk_2", + "original_index": 2, + "content": "/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n pub const Fp: Regs = Regs::X29;\n pub const Lr: Regs = Regs::X30;\n}\n\n/// Return an ARM64 ArchCapstoneBuilder\npub fn capstone() -> capstone::arch::arm64::ArchCapstoneBuilder {\n capstone::Capstone::new()\n .arm64()\n .mode(capstone::arch::arm64::ArchMode::Arm)\n}\n\npub type GuestReg = u64;\n\nimpl crate::ArchExtras for crate::CPU {\n fn read_return_address<T>(&self) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n self.read_reg(Regs::Lr)\n }\n\n fn write_return_address<T>(&self, val: T) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n self.write_reg(Regs::Lr, val)\n }\n\n" + }, + { + "chunk_id": "doc_7_chunk_3", + "original_index": 3, + "content": " fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let reg_id = match idx {\n 0 => Regs::X0,\n 1 => Regs::X1,\n 2 => Regs::X2,\n 3 => Regs::X3,\n 4 => Regs::X4,\n 5 => Regs::X5,\n r => return Err(format!(\"Unsupported argument: {r:}\")),\n };\n\n self.read_reg(reg_id)\n }\n\n" + }, + { + "chunk_id": "doc_7_chunk_4", + "original_index": 4, + "content": " fn write_function_argument<T>(\n &self,\n conv: CallingConvention,\n idx: i32,\n val: T,\n ) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let val: GuestReg = val.into();\n match idx {\n 0 => self.write_reg(Regs::X0, val),\n 1 => self.write_reg(Regs::X1, val),\n _ => Err(format!(\"Unsupported argument: {idx:}\")),\n }\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_8", + "original_uuid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748", + "content": "use std::{mem::size_of, sync::OnceLock};\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::x86_64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n Rax = 0,\n Rbx = 1,\n Rcx = 2,\n Rdx = 3,\n Rsi = 4,\n Rdi = 5,\n Rbp = 6,\n Rsp = 7,\n R8 = 8,\n R9 = 9,\n R10 = 10,\n R11 = 11,\n R12 = 12,\n R13 = 13,\n R14 = 14,\n R15 = 15,\n Rip = 16,\n Rflags = 17,\n}\n\nstatic BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\npub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n BACKDOOR_ARCH_REGS.get_or_init(|| {\n enum_map! {\n BackdoorArgs::Ret => Regs::Rax,\n BackdoorArgs::Cmd => Regs::Rax,\n BackdoorArgs::Arg1 => Regs::Rdi,\n BackdoorArgs::Arg2 => Regs::Rsi,\n BackdoorArgs::Arg3 => Regs::Rdx,\n BackdoorArgs::Arg4 => Regs::R10,\n BackdoorArgs::Arg5 => Regs::R8,\n BackdoorArgs::Arg6 => Regs::R9,\n }\n })\n}\n\n/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n pub const Sp: Regs = Regs::Rsp;\n pub const Pc: Regs = Regs::Rip;\n}\n\n/// Return an X86 `ArchCapstoneBuilder`\n#[must_use]\npub fn capstone() -> capstone::arch::x86::ArchCapstoneBuilder {\n capstone::Capstone::new()\n .x86()\n .mode(capstone::arch::x86::ArchMode::Mode64)\n}\n\npub type GuestReg = u64;\n\nimpl crate::ArchExtras for crate::CPU {\n fn read_return_address<T>(&self) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n let mut ret_addr = [0; size_of::<GuestReg>()];\n unsafe { self.read_mem(stack_ptr, &mut ret_addr) };\n Ok(GuestReg::from_le_bytes(ret_addr).into())\n }\n\n fn write_return_address<T>(&self, val: T) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n let val: GuestReg = val.into();\n let ret_addr = val.to_le_bytes();\n unsafe { self.write_mem(stack_ptr, &ret_addr) };\n Ok(())\n }\n\n fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let reg_id = match idx {\n 0 => Regs::Rdi,\n 1 => Regs::Rsi,\n 2 => Regs::Rdx,\n 3 => Regs::Rcx,\n 4 => Regs::R8,\n 5 => Regs::R9,\n r => return Err(format!(\"Unsupported argument: {r:}\")),\n };\n\n self.read_reg(reg_id)\n }\n\n fn write_function_argument<T>(\n &self,\n conv: CallingConvention,\n idx: i32,\n val: T,\n ) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let val: GuestReg = val.into();\n match idx {\n 0 => self.write_reg(Regs::Rdi, val),\n 1 => self.write_reg(Regs::Rsi, val),\n _ => Err(format!(\"Unsupported argument: {idx:}\")),\n }\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_8_chunk_0", + "original_index": 0, + "content": "use std::{mem::size_of, sync::OnceLock};\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::x86_64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n Rax = 0,\n Rbx = 1,\n Rcx = 2,\n Rdx = 3,\n Rsi = 4,\n Rdi = 5,\n Rbp = 6,\n Rsp = 7,\n R8 = 8,\n R9 = 9,\n R10 = 10,\n R11 = 11,\n R12 = 12,\n R13 = 13,\n R14 = 14,\n R15 = 15,\n Rip = 16,\n Rflags = 17,\n}\n\nstatic BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\n" + }, + { + "chunk_id": "doc_8_chunk_1", + "original_index": 1, + "content": "pub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n BACKDOOR_ARCH_REGS.get_or_init(|| {\n enum_map! {\n BackdoorArgs::Ret => Regs::Rax,\n BackdoorArgs::Cmd => Regs::Rax,\n BackdoorArgs::Arg1 => Regs::Rdi,\n BackdoorArgs::Arg2 => Regs::Rsi,\n BackdoorArgs::Arg3 => Regs::Rdx,\n BackdoorArgs::Arg4 => Regs::R10,\n BackdoorArgs::Arg5 => Regs::R8,\n BackdoorArgs::Arg6 => Regs::R9,\n }\n })\n}\n\n/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n pub const Sp: Regs = Regs::Rsp;\n pub const Pc: Regs = Regs::Rip;\n}\n\n/// Return an X86 `ArchCapstoneBuilder`\n#[must_use]\npub fn capstone() -> capstone::arch::x86::ArchCapstoneBuilder {\n capstone::Capstone::new()\n .x86()\n .mode(capstone::arch::x86::ArchMode::Mode64)\n}\n\npub type GuestReg = u64;\n\n" + }, + { + "chunk_id": "doc_8_chunk_2", + "original_index": 2, + "content": "impl crate::ArchExtras for crate::CPU {\n fn read_return_address<T>(&self) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n let mut ret_addr = [0; size_of::<GuestReg>()];\n unsafe { self.read_mem(stack_ptr, &mut ret_addr) };\n Ok(GuestReg::from_le_bytes(ret_addr).into())\n }\n\n fn write_return_address<T>(&self, val: T) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n let val: GuestReg = val.into();\n let ret_addr = val.to_le_bytes();\n unsafe { self.write_mem(stack_ptr, &ret_addr) };\n Ok(())\n }\n\n" + }, + { + "chunk_id": "doc_8_chunk_3", + "original_index": 3, + "content": " fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n where\n T: From<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let reg_id = match idx {\n 0 => Regs::Rdi,\n 1 => Regs::Rsi,\n 2 => Regs::Rdx,\n 3 => Regs::Rcx,\n 4 => Regs::R8,\n 5 => Regs::R9,\n r => return Err(format!(\"Unsupported argument: {r:}\")),\n };\n\n self.read_reg(reg_id)\n }\n\n" + }, + { + "chunk_id": "doc_8_chunk_4", + "original_index": 4, + "content": " fn write_function_argument<T>(\n &self,\n conv: CallingConvention,\n idx: i32,\n val: T,\n ) -> Result<(), String>\n where\n T: Into<GuestReg>,\n {\n if conv != CallingConvention::Cdecl {\n return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n }\n\n let val: GuestReg = val.into();\n match idx {\n 0 => self.write_reg(Regs::Rdi, val),\n 1 => self.write_reg(Regs::Rsi, val),\n _ => Err(format!(\"Unsupported argument: {idx:}\")),\n }\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_9", + "original_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", + "content": "//! Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods\n//!\n\n//use ahash::AHasher;\n//use core::hash::Hasher;\n\nuse alloc::{rc::Rc, string::String, vec::Vec};\nuse core::cell::RefCell;\nuse std::hash::{Hash, Hasher};\n\nuse grammartec::{\n newtypes::NodeID,\n rule::RuleIDOrCustom,\n tree::{Tree, TreeLike},\n};\nuse libafl_bolts::HasLen;\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n generators::nautilus::NautilusContext,\n inputs::{BytesInput, Input, InputConverter},\n Error,\n};\n\n/// An [`Input`] implementation for `Nautilus` grammar.\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct NautilusInput {\n /// The input representation as Tree\n pub tree: Tree,\n}\n\nimpl Input for NautilusInput {\n /// Generate a name for this input\n #[must_use]\n fn generate_name(&self, idx: usize) -> String {\n /*let mut hasher = AHasher::new_with_keys(0, 0);\n for term in &self.terms {\n hasher.write(term.symbol.as_bytes());\n }\n format!(\"{:016x}\", hasher.finish())*/\n format!(\"id:{idx}\")\n }\n}\n\n/// Rc Ref-cell from Input\nimpl From<NautilusInput> for Rc<RefCell<NautilusInput>> {\n fn from(input: NautilusInput) -> Self {\n Rc::new(RefCell::new(input))\n }\n}\n\nimpl HasLen for NautilusInput {\n #[inline]\n fn len(&self) -> usize {\n self.tree.size()\n }\n}\n\nimpl NautilusInput {\n /// Creates a new codes input using the given terminals\n #[must_use]\n pub fn new(tree: Tree) -> Self {\n Self { tree }\n }\n\n /// Create an empty [`Input`]\n #[must_use]\n pub fn empty() -> Self {\n Self {\n tree: Tree {\n rules: vec![],\n sizes: vec![],\n paren: vec![],\n },\n }\n }\n\n /// Generate a `Nautilus` input from the given bytes\n pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec<u8>) {\n bytes.clear();\n self.tree.unparse(NodeID::from(0), &context.ctx, bytes);\n }\n\n /// Get the tree representation of this input\n #[must_use]\n pub fn tree(&self) -> &Tree {\n &self.tree\n }\n\n /// Get the tree representation of this input, as a mutable reference\n #[must_use]\n pub fn tree_mut(&mut self) -> &mut Tree {\n &mut self.tree\n }\n}\n\nimpl Hash for NautilusInput {\n fn hash<H: Hasher>(&self, state: &mut H) {\n self.tree().paren.hash(state);\n for r in &self.tree().rules {\n match r {\n RuleIDOrCustom::Custom(a, b) => {\n a.hash(state);\n b.hash(state);\n }\n RuleIDOrCustom::Rule(a) => a.hash(state),\n }\n }\n self.tree().sizes.hash(state);\n }\n}\n\n/// `InputConverter` to convert from `NautilusInput` to `BytesInput`\n#[derive(Debug)]\npub struct NautilusToBytesInputConverter<'a> {\n ctx: &'a NautilusContext,\n}\n\nimpl<'a> NautilusToBytesInputConverter<'a> {\n #[must_use]\n /// Create a new `NautilusToBytesInputConverter` from a context\n pub fn new(ctx: &'a NautilusContext) -> Self {\n Self { ctx }\n }\n}\n\nimpl<'a> InputConverter for NautilusToBytesInputConverter<'a> {\n type From = NautilusInput;\n type To = BytesInput;\n\n fn convert(&mut self, input: Self::From) -> Result<Self::To, Error> {\n let mut bytes = vec![];\n input.unparse(self.ctx, &mut bytes);\n Ok(BytesInput::new(bytes))\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_9_chunk_0", + "original_index": 0, + "content": "//! Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods\n//!\n\n//use ahash::AHasher;\n//use core::hash::Hasher;\n\nuse alloc::{rc::Rc, string::String, vec::Vec};\nuse core::cell::RefCell;\nuse std::hash::{Hash, Hasher};\n\nuse grammartec::{\n newtypes::NodeID,\n rule::RuleIDOrCustom,\n tree::{Tree, TreeLike},\n};\nuse libafl_bolts::HasLen;\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n generators::nautilus::NautilusContext,\n inputs::{BytesInput, Input, InputConverter},\n Error,\n};\n\n/// An [`Input`] implementation for `Nautilus` grammar.\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct NautilusInput {\n /// The input representation as Tree\n pub tree: Tree,\n}\n\n" + }, + { + "chunk_id": "doc_9_chunk_1", + "original_index": 1, + "content": "impl Input for NautilusInput {\n /// Generate a name for this input\n #[must_use]\n fn generate_name(&self, idx: usize) -> String {\n /*let mut hasher = AHasher::new_with_keys(0, 0);\n for term in &self.terms {\n hasher.write(term.symbol.as_bytes());\n }\n format!(\"{:016x}\", hasher.finish())*/\n format!(\"id:{idx}\")\n }\n}\n\n/// Rc Ref-cell from Input\nimpl From<NautilusInput> for Rc<RefCell<NautilusInput>> {\n fn from(input: NautilusInput) -> Self {\n Rc::new(RefCell::new(input))\n }\n}\n\nimpl HasLen for NautilusInput {\n #[inline]\n fn len(&self) -> usize {\n self.tree.size()\n }\n}\n\nimpl NautilusInput {\n /// Creates a new codes input using the given terminals\n #[must_use]\n pub fn new(tree: Tree) -> Self {\n Self { tree }\n }\n\n" + }, + { + "chunk_id": "doc_9_chunk_2", + "original_index": 2, + "content": " /// Create an empty [`Input`]\n #[must_use]\n pub fn empty() -> Self {\n Self {\n tree: Tree {\n rules: vec![],\n sizes: vec![],\n paren: vec![],\n },\n }\n }\n\n /// Generate a `Nautilus` input from the given bytes\n pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec<u8>) {\n bytes.clear();\n self.tree.unparse(NodeID::from(0), &context.ctx, bytes);\n }\n\n" + }, + { + "chunk_id": "doc_9_chunk_3", + "original_index": 3, + "content": " /// Get the tree representation of this input\n #[must_use]\n pub fn tree(&self) -> &Tree {\n &self.tree\n }\n\n /// Get the tree representation of this input, as a mutable reference\n #[must_use]\n pub fn tree_mut(&mut self) -> &mut Tree {\n &mut self.tree\n }\n}\n\nimpl Hash for NautilusInput {\n fn hash<H: Hasher>(&self, state: &mut H) {\n self.tree().paren.hash(state);\n for r in &self.tree().rules {\n match r {\n RuleIDOrCustom::Custom(a, b) => {\n a.hash(state);\n b.hash(state);\n }\n RuleIDOrCustom::Rule(a) => a.hash(state),\n }\n }\n self.tree().sizes.hash(state);\n }\n}\n\n" + }, + { + "chunk_id": "doc_9_chunk_4", + "original_index": 4, + "content": "/// `InputConverter` to convert from `NautilusInput` to `BytesInput`\n#[derive(Debug)]\npub struct NautilusToBytesInputConverter<'a> {\n ctx: &'a NautilusContext,\n}\n\nimpl<'a> NautilusToBytesInputConverter<'a> {\n #[must_use]\n /// Create a new `NautilusToBytesInputConverter` from a context\n pub fn new(ctx: &'a NautilusContext) -> Self {\n Self { ctx }\n }\n}\n\nimpl<'a> InputConverter for NautilusToBytesInputConverter<'a> {\n type From = NautilusInput;\n type To = BytesInput;\n\n fn convert(&mut self, input: Self::From) -> Result<Self::To, Error> {\n let mut bytes = vec![];\n input.unparse(self.ctx, &mut bytes);\n Ok(BytesInput::new(bytes))\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_10", + "original_uuid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", + "content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n
simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_10_chunk_0", + "original_index": 0, + "content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_11", + "original_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", + "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if 
len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", + "chunks": [ + { + "chunk_id": "doc_11_chunk_0", + "original_index": 0, + "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . 
import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n" + }, + { + "chunk_id": "doc_11_chunk_1", + "original_index": 1, + "content": " _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n" + }, + { + "chunk_id": "doc_11_chunk_2", + "original_index": 2, + "content": " def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n" + }, + { + "chunk_id": "doc_11_chunk_3", + "original_index": 3, + "content": " # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n" + }, + { + "chunk_id": "doc_11_chunk_4", + "original_index": 4, + "content": " continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n" + }, + { + "chunk_id": "doc_11_chunk_5", + "original_index": 5, + "content": " # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n" + }, + { + "chunk_id": "doc_11_chunk_6", + "original_index": 6, + "content": " # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not 
isinstance(module_args, tuple):\n module_args = (module_args,)\n\n" + }, + { + "chunk_id": "doc_11_chunk_7", + "original_index": 7, + "content": " self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n" + }, + { + "chunk_id": "doc_11_chunk_8", + "original_index": 8, + "content": " target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n" + }, + { + "chunk_id": "doc_11_chunk_9", + "original_index": 9, + "content": " def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n" + } + ] + }, + { + "doc_id": "doc_12", + "original_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", + "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n", + "chunks": [ + { + "chunk_id": "doc_12_chunk_0", + "original_index": 0, + "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging 
import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n" + }, + { + "chunk_id": "doc_12_chunk_1", + "original_index": 1, + "content": " return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n" + } + ] + }, + { + "doc_id": "doc_13", + "original_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", + "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", + "chunks": [ + { + "chunk_id": "doc_13_chunk_0", + "original_index": 0, + "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n 
logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n" + }, + { + "chunk_id": "doc_13_chunk_1", + "original_index": 1, + "content": " f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n" + } + ] + }, + { + "doc_id": "doc_14", + "original_uuid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", + "content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n", + "chunks": [ + { + "chunk_id": "doc_14_chunk_0", + "original_index": 0, + "content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n" + } + ] + }, + { + "doc_id": "doc_15", + "original_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", + "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Morse_code(Decoder[str]):\n # A priority list for char/word boundaries\n BOUNDARIES = {\" \": 1, \"/\": 2, \"\\n\": 3}\n PURGE = {ord(c): None for c in BOUNDARIES.keys()}\n MAX_PRIORITY = 3\n ALLOWED = {\".\", \"-\", \" \", \"/\", \"\\n\"}\n MORSE_CODE_DICT: Dict[str, str]\n MORSE_CODE_DICT_INV: Dict[str, str]\n\n def decode(self, ctext: T) -> Optional[U]:\n logging.debug(\"Attempting Morse code decoder\")\n\n char_boundary = word_boundary = None\n\n char_boundary = word_boundary = None\n char_priority = word_priority = 0\n # Custom loop allows early break\n for i in ctext:\n 
i_priority = self.BOUNDARIES.get(i)\n if i_priority is None:\n if i in self.ALLOWED:\n continue\n logging.debug(f\"Non-morse char '{i}' found\")\n return None\n\n if i_priority <= char_priority or i == char_boundary or i == word_boundary:\n continue\n # Default to having a char boundary over a word boundary\n if (\n i_priority > word_priority\n and word_boundary is None\n and char_boundary is not None\n ):\n word_priority = i_priority\n word_boundary = i\n continue\n char_priority = i_priority\n char_boundary = i\n\n logging.debug(\n f\"Char boundary is unicode {ord(char_boundary)}, and word boundary is unicode {ord(word_boundary) if word_boundary is not None else None}\"\n )\n\n result = \"\"\n\n for word in ctext.split(word_boundary) if word_boundary else [ctext]:\n logging.debug(f\"Attempting to decode word {word}\")\n for char in word.split(char_boundary):\n char = char.translate(self.PURGE)\n if len(char) == 0:\n continue\n try:\n m = self.MORSE_CODE_DICT_INV[char]\n except KeyError:\n logging.debug(f\"Invalid codeword '{char}' found\")\n return None\n result = result + m\n # after every word add a space\n result = result + \" \"\n if len(result) == 0:\n logging.debug(\"Morse code failed to match\")\n return None\n # Remove trailing space\n result = result[:-1]\n logging.info(f\"Morse code successful, returning {result}\")\n return result.strip().upper()\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.MORSE_CODE_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.MORSE_CODE_DICT_INV = {v: k for k, v in self.MORSE_CODE_DICT.items()}\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The morse code dictionary to use\",\n req=False,\n default=\"cipheydists::translate::morse\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"morse_code\"\n", + "chunks": [ + { + "chunk_id": "doc_15_chunk_0", + "original_index": 0, + "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Morse_code(Decoder[str]):\n # A priority list for char/word boundaries\n BOUNDARIES = {\" \": 1, \"/\": 2, \"\\n\": 3}\n PURGE = {ord(c): None for c in BOUNDARIES.keys()}\n MAX_PRIORITY = 3\n ALLOWED = {\".\", \"-\", \" \", \"/\", \"\\n\"}\n MORSE_CODE_DICT: Dict[str, str]\n MORSE_CODE_DICT_INV: Dict[str, str]\n\n def decode(self, ctext: T) -> Optional[U]:\n logging.debug(\"Attempting Morse code decoder\")\n\n char_boundary = word_boundary = None\n\n" + }, + { + "chunk_id": "doc_15_chunk_1", + "original_index": 1, + "content": " char_boundary = word_boundary = None\n char_priority = word_priority = 0\n # Custom loop allows early break\n for i in ctext:\n i_priority = self.BOUNDARIES.get(i)\n if i_priority is None:\n if i in self.ALLOWED:\n continue\n logging.debug(f\"Non-morse char '{i}' found\")\n return None\n\n" + }, + { + "chunk_id": "doc_15_chunk_2", + "original_index": 2, + "content": " if i_priority <= char_priority or i == char_boundary or i == word_boundary:\n continue\n # Default to having a char boundary over a word boundary\n if (\n i_priority > word_priority\n and word_boundary is None\n and char_boundary is not None\n ):\n word_priority = i_priority\n word_boundary = i\n continue\n char_priority = i_priority\n char_boundary = i\n\n logging.debug(\n f\"Char boundary is 
unicode {ord(char_boundary)}, and word boundary is unicode {ord(word_boundary) if word_boundary is not None else None}\"\n )\n\n result = \"\"\n\n" + }, + { + "chunk_id": "doc_15_chunk_3", + "original_index": 3, + "content": " for word in ctext.split(word_boundary) if word_boundary else [ctext]:\n logging.debug(f\"Attempting to decode word {word}\")\n for char in word.split(char_boundary):\n char = char.translate(self.PURGE)\n if len(char) == 0:\n continue\n try:\n m = self.MORSE_CODE_DICT_INV[char]\n except KeyError:\n logging.debug(f\"Invalid codeword '{char}' found\")\n return None\n result = result + m\n # after every word add a space\n result = result + \" \"\n if len(result) == 0:\n logging.debug(\"Morse code failed to match\")\n return None\n # Remove trailing space\n result = result[:-1]\n logging.info(f\"Morse code successful, returning {result}\")\n return result.strip().upper()\n\n" + }, + { + "chunk_id": "doc_15_chunk_4", + "original_index": 4, + "content": " @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.MORSE_CODE_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.MORSE_CODE_DICT_INV = {v: k for k, v in self.MORSE_CODE_DICT.items()}\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The morse code dictionary to use\",\n req=False,\n default=\"cipheydists::translate::morse\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"morse_code\"\n" + } + ] + }, + { + "doc_id": "doc_16", + "original_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", + "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n 
sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n", + "chunks": [ + { + "chunk_id": "doc_16_chunk_0", + "original_index": 0, + "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n" + }, + { + "chunk_id": "doc_16_chunk_1", + "original_index": 1, + "content": " def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n" + }, + { + "chunk_id": "doc_16_chunk_2", + "original_index": 2, + "content": " # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return 
None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n" + }, + { + "chunk_id": "doc_16_chunk_3", + "original_index": 3, + "content": " sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n" + }, + { + "chunk_id": "doc_16_chunk_4", + "original_index": 4, + "content": " def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n" + }, + { + "chunk_id": "doc_16_chunk_5", + "original_index": 5, + "content": " for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n" + } + ] + }, + { + "doc_id": "doc_17", + "original_uuid": "44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3", + "content": "# by https://github.com/RustyDucky and https://github.com/lukasgabriel\n\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Tap_code(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Tap code decoding\n \"\"\"\n try:\n result = \"\"\n combinations = ctext.split(\" \")\n for fragment in combinations:\n result += self.TABLE.get(fragment)\n return result\n except Exception:\n return None\n\n @staticmethod\n def priority() -> 
float:\n return 0.06\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.TABLE = config.get_resource(self._params()[\"dict\"], Translation)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The table of letters used for the tap code interpretation.\",\n req=False,\n default=\"cipheydists::translate::tap_code\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"tap_code\"\n", + "chunks": [ + { + "chunk_id": "doc_17_chunk_0", + "original_index": 0, + "content": "# by https://github.com/RustyDucky and https://github.com/lukasgabriel\n\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Tap_code(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Tap code decoding\n \"\"\"\n try:\n result = \"\"\n combinations = ctext.split(\" \")\n for fragment in combinations:\n result += self.TABLE.get(fragment)\n return result\n except Exception:\n return None\n\n" + }, + { + "chunk_id": "doc_17_chunk_1", + "original_index": 1, + "content": " @staticmethod\n def priority() -> float:\n return 0.06\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.TABLE = config.get_resource(self._params()[\"dict\"], Translation)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The table of letters used for the tap code interpretation.\",\n req=False,\n default=\"cipheydists::translate::tap_code\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"tap_code\"\n" + } + ] + }, + { + "doc_id": "doc_18", + "original_uuid": "bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2", + "content": "from functools import lru_cache\nfrom typing import Any, Dict, Optional, Set\n\nimport cipheydists\nimport logging\n\nfrom ciphey.iface import (\n Config,\n Distribution,\n ParamSpec,\n ResourceLoader,\n Translation,\n WordList,\n registry,\n)\n\n\n@registry.register_multi(WordList, Distribution, Translation)\nclass CipheyDists(ResourceLoader):\n # _wordlists: Set[str] = frozenset({\"english\", \"english1000\", \"englishStopWords\"})\n # _brandons: Set[str] = frozenset({\"english\"})\n # _dists: Set[str] = frozenset({\"twist\"})\n # _translates: Set[str] = frozenset({\"morse\"})\n _getters = {\n \"list\": cipheydists.get_list,\n \"dist\": cipheydists.get_dist,\n \"brandon\": cipheydists.get_brandon,\n \"translate\": cipheydists.get_translate,\n }\n\n def whatResources(self) -> Optional[Set[str]]:\n pass\n\n @lru_cache()\n def getResource(self, name: str) -> Any:\n logging.debug(f\"Loading cipheydists resource {name}\")\n prefix, name = name.split(\"::\", 1)\n return self._getters[prefix](name)\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n", + "chunks": [ + { + "chunk_id": "doc_18_chunk_0", + "original_index": 0, + "content": "from functools import lru_cache\nfrom typing import Any, Dict, Optional, Set\n\nimport cipheydists\nimport logging\n\nfrom ciphey.iface import (\n Config,\n Distribution,\n ParamSpec,\n ResourceLoader,\n Translation,\n WordList,\n registry,\n)\n\n\n@registry.register_multi(WordList, Distribution, Translation)\nclass CipheyDists(ResourceLoader):\n # _wordlists: Set[str] = frozenset({\"english\", \"english1000\", \"englishStopWords\"})\n # _brandons: Set[str] = 
frozenset({\"english\"})\n # _dists: Set[str] = frozenset({\"twist\"})\n # _translates: Set[str] = frozenset({\"morse\"})\n _getters = {\n \"list\": cipheydists.get_list,\n \"dist\": cipheydists.get_dist,\n \"brandon\": cipheydists.get_brandon,\n \"translate\": cipheydists.get_translate,\n }\n\n" + }, + { + "chunk_id": "doc_18_chunk_1", + "original_index": 1, + "content": " def whatResources(self) -> Optional[Set[str]]:\n pass\n\n @lru_cache()\n def getResource(self, name: str) -> Any:\n logging.debug(f\"Loading cipheydists resource {name}\")\n prefix, name = name.split(\"::\", 1)\n return self._getters[prefix](name)\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n" + } + ] + }, + { + "doc_id": "doc_19", + "original_uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", + "content": "# Translated to Python and adapted for Ciphey from the JS original at https://github.com/pshihn/base69\n\n\nimport re\nfrom math import ceil\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, WordList, registry\n\n\n@registry.register\nclass Base69(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base69 decoding\n \"\"\"\n # Remove whitespace\n try:\n ctext = re.sub(r\"\\s+\", \"\", ctext, flags=re.UNICODE)\n extra_bytes = 0\n clen = len(ctext)\n\n if ctext[:-1] == \"=\":\n extra_bytes = int(ctext[clen - 2])\n\n CHUNK_COUNT = ceil(clen / 16)\n result = [0 for _ in range(CHUNK_COUNT * 7 - extra_bytes)]\n\n for i in range(CHUNK_COUNT):\n chunk_string = ctext[i * 16 : (i + 1) * 16]\n if extra_bytes and (i == CHUNK_COUNT - 1):\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert[0 : 7 - extra_bytes]):\n result[n + i * 7] = elem\n else:\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert):\n result[n + i * 7] = elem % 256\n return bytearray(result).decode().strip(\"\\x00\")\n except Exception:\n return None\n\n def decode_chunk(self, s: str):\n padded_bytes = s.endswith(\"=\")\n\n decoded = [0 for _ in range(8)]\n for i in range(8):\n decoded[i] = (\n 0\n if i == 7 and padded_bytes\n else self.chars_to_byte(s[i * 2 : i * 2 + 2])\n )\n\n result = [0 for _ in range(7)]\n for i in range(7):\n t1 = decoded[i] << (i + 1)\n t2 = decoded[i + 1] >> (7 - i - 1)\n result[i] = t1 | t2\n return result\n\n def chars_to_byte(self, s: str):\n return (69 * self.CHARS.index(s[1])) + (self.CHARS.index(s[0]))\n\n @staticmethod\n def priority() -> float:\n # If this becomes lower or equal to the reverse, it breaks.\n # So I'll set it to 0.2 for now since it is very fast anyways.\n return 0.2\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.CHARS = config.get_resource(self._params()[\"dict\"], WordList)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The charset used for the decoder.\",\n req=False,\n default=\"cipheydists::list::base69\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"base69\"\n", + "chunks": [ + { + "chunk_id": "doc_19_chunk_0", + "original_index": 0, + "content": "# Translated to Python and adapted for Ciphey from the JS original at https://github.com/pshihn/base69\n\n\nimport re\nfrom math import ceil\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, WordList, registry\n\n\n@registry.register\nclass 
Base69(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base69 decoding\n \"\"\"\n # Remove whitespace\n try:\n ctext = re.sub(r\"\\s+\", \"\", ctext, flags=re.UNICODE)\n extra_bytes = 0\n clen = len(ctext)\n\n if ctext[:-1] == \"=\":\n extra_bytes = int(ctext[clen - 2])\n\n CHUNK_COUNT = ceil(clen / 16)\n result = [0 for _ in range(CHUNK_COUNT * 7 - extra_bytes)]\n\n" + }, + { + "chunk_id": "doc_19_chunk_1", + "original_index": 1, + "content": " for i in range(CHUNK_COUNT):\n chunk_string = ctext[i * 16 : (i + 1) * 16]\n if extra_bytes and (i == CHUNK_COUNT - 1):\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert[0 : 7 - extra_bytes]):\n result[n + i * 7] = elem\n else:\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert):\n result[n + i * 7] = elem % 256\n return bytearray(result).decode().strip(\"\\x00\")\n except Exception:\n return None\n\n" + }, + { + "chunk_id": "doc_19_chunk_2", + "original_index": 2, + "content": " def decode_chunk(self, s: str):\n padded_bytes = s.endswith(\"=\")\n\n decoded = [0 for _ in range(8)]\n for i in range(8):\n decoded[i] = (\n 0\n if i == 7 and padded_bytes\n else self.chars_to_byte(s[i * 2 : i * 2 + 2])\n )\n\n result = [0 for _ in range(7)]\n for i in range(7):\n t1 = decoded[i] << (i + 1)\n t2 = decoded[i + 1] >> (7 - i - 1)\n result[i] = t1 | t2\n return result\n\n" + }, + { + "chunk_id": "doc_19_chunk_3", + "original_index": 3, + "content": " def chars_to_byte(self, s: str):\n return (69 * self.CHARS.index(s[1])) + (self.CHARS.index(s[0]))\n\n @staticmethod\n def priority() -> float:\n # If this becomes lower or equal to the reverse, it breaks.\n # So I'll set it to 0.2 for now since it is very fast anyways.\n return 0.2\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.CHARS = config.get_resource(self._params()[\"dict\"], WordList)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The charset used for the decoder.\",\n req=False,\n default=\"cipheydists::list::base69\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"base69\"\n" + } + ] + }, + { + "doc_id": "doc_20", + "original_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", + "content": "import pytest\n\nfrom ciphey import decrypt\nfrom ciphey.iface import Config\n\nanswer_str = \"Hello my name is bee and I like dog and apple and tree\"\n\n\ndef test_a1z26():\n res = decrypt(\n Config().library_default().complete_config(),\n \"8 5 12 12 15 13 25 14 1 13 5 9 19 2 5 5 1 14 4 9 12 9 11 5 4 15 7 1 14 4 1 16 16 12 5 1 14 4 20 18 5 5\",\n )\n assert res == \"hellomynameisbeeandilikedogandappleandtree\"\n\n\ndef test_affine():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Ihsst bf kxbh rd ghh xky R srjh ytz xky xccsh xky muhh\",\n )\n assert res == answer_str\n\n\ndef test_ascii_shift():\n res = decrypt(\n Config().library_default().complete_config(),\n '\"?FFIzGSzH;G?zCMzz#zFCE?z>IAz;H>z;JJF?z;H>zNL??',\n )\n assert res == answer_str\n\n\ndef test_atbash():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Svool nb mznv rh yvv zmw R orpv wlt zmw zkkov zmw givv\",\n )\n assert res == answer_str\n\n\ndef test_baconian_complete_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABB ABABB ABBBA ABBAA BBAAA ABBAB AAAAA ABBAA AABAA ABAAA BAABA AAAAB AABAA AABAA AAAAA ABBAB AAABB ABAAA ABABB ABAAA ABABA AABAA AAABB 
ABBBA AABBA AAAAA ABBAB AAABB AAAAA ABBBB ABBBB ABABB AABAA AAAAA ABBAB AAABB BAABB BAAAB AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n\ndef test_baconian_standard_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABA ABABA ABBAB ABABB BABBA ABBAA AAAAA ABABB AABAA ABAAA BAAAB AAAAB AABAA AABAA AAAAA ABBAA AAABB ABAAA ABABA ABAAA ABAAB AABAA AAABB ABBAB AABBA AAAAA ABBAA AAABB AAAAA ABBBA ABBBA ABABA AABAA AAAAA ABBAA AAABB BAABA BAAAA AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n\ndef test_base32():\n res = decrypt(\n Config().library_default().complete_config(),\n \"JBSWY3DPEBWXSIDOMFWWKIDJOMQGEZLFEBQW4ZBAJEQGY2LLMUQGI33HEBQW4ZBAMFYHA3DFEBQW4ZBAORZGKZI=\",\n )\n assert res == answer_str\n\n\ndef test_base58_bitcoin():\n res = decrypt(\n Config().library_default().complete_config(),\n \"6qYhNwsP46Mn4gy6gyANfsMm2icAxGFA6gnFjVm9phYHeby7PZm3vthiXxSU77teQgTFGbHETn\",\n )\n assert res == answer_str\n\n\ndef test_base58_ripple():\n res = decrypt(\n Config().library_default().complete_config(),\n \"aqY64A1PhaM8hgyagyw4C1Mmp5cwxGEwag8EjVm9F6YHebyfPZmsvt65XxS7ffteQgTEGbHNT8\",\n )\n assert res == answer_str\n\n\ndef test_base62():\n res = decrypt(\n Config().library_default().complete_config(),\n \"2mQvnz9Yevvb7DRCuyDltsP31vJLToR5pjE9orWkzHMUsht2kbC96PLbZ1sdIocsGHENrzC2n\",\n )\n assert res == answer_str\n\n\ndef test_base64():\n res = decrypt(\n Config().library_default().complete_config(),\n \"SGVsbG8gbXkgbmFtZSBpcyBiZWUgYW5kIEkgbGlrZSBkb2cgYW5kIGFwcGxlIGFuZCB0cmVl\",\n )\n\n assert res == answer_str\n\n\ndef test_base69():\n res = decrypt(\n Config().library_default().complete_config(),\n \"kAZAtABBeB8A-AoB8ADBNAhBLA1AFBgA0AXBfBGATAVAFBgAwAWBHBu7ARt\",\n )\n assert res == answer_str\n\n\ndef test_base91():\n res = decrypt(\n Config().library_default().complete_config(),\n \">OwJh>=/fV@$x88j9ZNKB*ge$yV%lE%ZKi,+<]>-.-[+>-----<]>++.+++++++..+++.+[+>++<]>.[++>+<]>---.--[+++>-<]>.-[+>++++<]>.[++>+<]>--.-[+++>++<]>-.+[-->---<]>.--------.[+++++>+<]>+.-[+++>--<]>-.++++++++++.---[+>++<]>.[+++>-<]>++.+++..[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.-[+++>+<]>--.-[+>----<]>.++[+++>--<]>.---.++.------.[+++++>+<]>+.+[+>---<]>+.+++++++++++.--------.-[+++>-<]>--.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[+++>-<]>+.-[-->---<]>..----.-------.[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[++>+<]>++++.--.-------------..\",\n )\n assert res == answer_str\n\n\ndef test_brandon():\n res = decrypt(\n Config().library_default().complete_config(),\n \"R hvv blf tzgsvi yvuliv nv...sfmtib...gviirurvw... Xofgxsrmt blfi yzyvh gl blfi yivzhg. Vnkvili Vnsbi srh nzixsvw srh ovtrlmh rmgl lfi ozmwh... Ozrw hrvtv gl vevib uligivhh uiln sviv gl gsv Yofv Nlfmgzrmh. Izyrw zmw izevmlfh, sv yrgvh zmw yrgvh zdzb. Nvm lu gsv Mligs, blf hgzmw zg gsv kivxrkrxv. Blfi prmth szev uzrovw blf, hl mld blf gfim gl gsv tlwh! Zmw bvg blf wl mlg kovzw? Blf wl mlg pmvvo gl wfhg blfi svzwh drgs zhs? Rmhgvzw blf dzro, Dsb szev gsv tlwh ulihzpvm fh? Dv nfhg ollp rmgl gsv girzoh dv uzrovw olmt ztl! Rm z grnv kzhhvw, lfi dliow rmgvigdrmvw drgs zmlgsvi gsilfts zm fksvzezo hxslozih xzoo gsv Xlmqfmxgrlm lu gsv Hksvivh... Gsv tlwh zooldvw fmslob ulixvh gl hork rmgl lfi wlnzrm. Gsv luuhkirmt lu gszg xzgzxobhn dzh gsv mvuvirlfh ulixv xzoovw nztrx... Bvg dv wrw mlg yzmrhs rg, rmhgvzw hgfwbrmt gsv erov zixzmv uli lfi kldvi zmw dvzogs! Zmw gsv nlmhgvih zg lfi wlli...gsv fmslob ivorxgh lu gsrh Xlmqfmxgrlm? 
...gsv gilooh...gsv xlikhv vzgvih...gsv dvivdloevh? Wrw dv izrhv lfi hdliwh ztzrmhg gsvn? Li szev dv ozrw gsrh yfiwvm lm lgsvih? Lm hl-xzoovw drgxsvih? Hgizb xsrowivm gzftsg gsv dzbh lu ulfo hlixvib, gsvri ylwrvh nfgzgvw gsilfts yozhksvnlfh irgfzo. Hvmg gl urtsg nlmhgvih gslfts gsvb xlfow mlg wrhgrmtfrhs tllw uiln vero. Gsv uorxpvi lu sfnzmrgb olmt vcgrmtfrhsvw drgsrm gsvn. Bvh, gsvri mfnyvih szev wdrmwovw gsilfts gsv bvzih. Yfg z uvd hgroo ilzn lfi ozmwh, luuvirmt gsvri yollwb dlip uli xlrm. Gl gsrh wzb gsvb hsznv fh drgs gsvri evib vcrhgvmxv! Gsv Mligs yovvwh, uolttvw yb dzi. Gsv yzggovh ziv gsv tlwh' dsrk, xszhgrhvnvmg uli lfi hrmh! Zmw ovg fh mlg ulitvg gsv gviilih, gsv hxlfitvh uiln yvblmw lfi dliow! Gsv Drow Sfmg irwvh gsv hpb drgs vevib ufoo nllm! Gsv wzip izrwvih zywfxg lfi xsrowivm rmgl ozmwh fmpmldm! Hlnv hzb gsvb svizow z hvxlmw Xlmqfmxgrlm! Xzm dv xszig z xlfihv yzxp rmgl gsv ortsg? Droo dv urmw gsv hgivmtgs gl yzmrhs gsv nztvh uiln lfi prmtwlnh? Fmrgv zilfmw gsv dzings lu gsv Vgvimzo Uriv? Mrts rh gsv Grnv lu gsv Hdliw zmw gsv Zcv! Mlmv droo urtsg gsrh dzi rm lfi hgvzw! Mrts rh gsv Grnv lu Nzwmvhh zmw Wrhwzrm!\",\n )\n assert bool(res) is True\n\n\ndef test_caesar():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Uryyb zl anzr vf orr naq V yvxr qbt naq nccyr naq gerr\",\n )\n assert res == answer_str\n\n\ndef test_decimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"72 101 108 108 111 32 109 121 32 110 97 109 101 32 105 115 32 98 101 101 32 97 110 100 32 73 32 108 105 107 101 32 100 111 103 32 97 110 100 32 97 112 112 108 101 32 97 110 100 32 116 114 101 101\",\n )\n assert res == answer_str\n\n\ndef test_dna():\n res = decrypt(\n Config().library_default().complete_config(),\n \"GAT AAT GCT ATT TCT ATT AAT ACT GAA CGT GAA TCT ACT ATT AAT GGT\",\n )\n assert res == \"DNAISINTERESTING\"\n\n\ndef test_dtmf():\n res = decrypt(\n Config().library_default().complete_config(),\n \"1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 
1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697\",\n )\n assert res == answer_str\n\n\ndef test_galactic():\n res = decrypt(\n Config().library_default().complete_config(),\n \"\u2351\u14b7\ua58e\ua58e\ud835\ude79 \u14b2|| \u30ea\u1511\u14b2\u14b7 \u254e\u14ed \u0296\u14b7\u14b7 \u1511\u30ea\u21b8 i \ua58e\u254e\ua58c\u14b7 \u21b8\ud835\ude79\u22a3 \u1511\u30ea\u21b8 \u1511!\u00a1!\u00a1\ua58e\u14b7 \u1511\u30ea\u21b8 \u2138 \u0323 \u2237\u14b7\u14b7\",\n )\n assert res == answer_str.lower()\n\n\ndef test_galactic_Xproblem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"\u2351\u1511\ua58e\u254e\u2393\u1511 \u0307/, \u0307/||\ua58e\ud835\ude79!\u00a1\u2351\ud835\ude79\u30ea\u14b7, \u1511 \u0307/ \u1511\ua58e\ud835\ude79\u30ea\u14b7 \u1511\u30ea\u21b8 \u0307/\u14b7\u2237\ud835\ude79 \u0307/ \u2393\u2237\ud835\ude79\u14b2 \ud835\ude79 \u0307/\u2393\ud835\ude79\u2237\u21b8\",\n )\n assert res == \"halifax, xylophone, a x alone and xerox from oxford\"\n\n\ndef test_gzip():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H4sIAAzul18A/yXJzQmAMBSEwVa+ckwZT7LIw80P6sXuA3ocZpM9aC89msibXSJ6peA8RR3Hx5jTfzyXtAAbQvCyNgAAAA==\",\n )\n assert res == answer_str\n\n\ndef test_hexadecimal():\n res = decrypt(\n 
Config().library_default().complete_config(),\n \"48 65 6c 6c 6f 20 6d 79 20 6e 61 6d 65 20 69 73 20 62 65 65 20 61 6e 64 20 49 20 6c 69 6b 65 20 64 6f 67 20 61 6e 64 20 61 70 70 6c 65 20 61 6e 64 20 74 72 65 65\",\n )\n\n assert res == answer_str\n\n\ndef test_json_problem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"0110100001100101011011000110110001101111\",\n )\n assert res != \"0110100001100101011011000110110001101111\"\n\n\ndef test_leetspeak():\n res = decrypt(\n Config().library_default().complete_config(),\n \"|-|3ll0 my n4m3 1s 833 4nd 1 l1k3 D06 4ND 4ppl3 4nd 7R33\",\n )\n assert res.lower() == answer_str.lower()\n\n\ndef test_morse_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \".... . .-.. .-.. ---/-- -.--/-. .- -- ./.. .../-... . ./.- -. -../../.-.. .. -.- ./-.. --- --./.- -. -../.- .--. .--. .-.. ./.- -. -../- .-. . .\",\n )\n assert res == answer_str.upper()\n\n\ndef test_multi_tap():\n res = decrypt(\n Config().library_default().complete_config(),\n \"44 33 555 555 666 0 6 999 0 66 2 6 33 0 444 7777 0 22 33 33 0 2 66 3 0 444 0 555 444 55 33 0 3 666 4 0 2 66 3 0 2 7 7 555 33 0 2 66 3 0 8 777 33 33\",\n )\n assert res == answer_str.upper()\n\n\ndef test_new_line_at_start_returns():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"\\npass\\n\")\n\n assert res == \"\\npass\\n\"\n\n\ndef test_new_line_strip_and_return():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"pass\\n\")\n\n assert res == \"pass\\n\"\n\n\ndef test_octal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"110 145 154 154 157 40 155 171 40 156 141 155 145 40 151 163 40 142 145 145 40 141 156 144 40 111 40 154 151 153 145 40 144 157 147 40 141 156 144 40 141 160 160 154 145 40 141 156 144 40 164 162 145 145\",\n )\n assert res == answer_str\n\n\ndef test_plaintext():\n res = decrypt(Config().library_default().complete_config(), answer_str)\n assert res == answer_str\n\n\ndef test_quadgrams_messed_up_spacing():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\",\n )\n assert (\n res == \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\"\n )\n\n\ndef test_quadgrams_no_spaces():\n res = decrypt(\n Config().library_default().complete_config(),\n \"HellomynameisbeeandIlikedogandappleandtree\",\n )\n assert res == \"HellomynameisbeeandIlikedogandappleandtree\"\n\n\ndef test_quadgrams_space_between_every_letter():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\",\n )\n assert (\n res\n == \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\"\n )\n\n\ndef test_reversed_text():\n res = decrypt(\n Config().library_default().complete_config(),\n \"eert dna elppa dna god ekil I dna eeb si eman ym olleH\",\n )\n assert res == answer_str\n\n\ndef test_rot47():\n res = decrypt(\n Config().library_default().complete_config(),\n \"$A9:?I @7 3=24< BF2CEK[ ;F586 >J G@H\",\n )\n assert res == \"Sphinx of black quartz, judge my vow\"\n\n\ndef test_soundex():\n res = decrypt(\n 
Config().library_default().complete_config(),\n \"H236 I200 I500 T000 P230\",\n )\n assert res.lower() == \"history is in the past\"\n\n\ndef test_tap_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \"4,4 1,5 4,3 4,4 3,4 3,3 1,5 4,4 5,2 3,4 4,4 2,3 4,2 1,5 1,5\",\n )\n assert res == \"test one two three\".upper()\n\n\ndef test_url():\n res = decrypt(\n Config().library_default().complete_config(),\n \"https%3A%2F%2Fwww%2Egoogle%2Ecom%2Fsearch%3Fq%3Dciphey\",\n )\n assert res == \"https://www.google.com/search?q=ciphey\"\n\n\ndef test_uuencode():\n res = decrypt(\n Config().library_default().complete_config(),\n 'begin 644 /dev/stdout\\nM2&5L;&\\\\@;7D@;F%M92!Iz#zFCE?z>IAz;H>z;JJF?z;H>zNL??',\n )\n assert res == answer_str\n\n\ndef test_atbash():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Svool nb mznv rh yvv zmw R orpv wlt zmw zkkov zmw givv\",\n )\n assert res == answer_str\n\n" + }, + { + "chunk_id": "doc_20_chunk_2", + "original_index": 2, + "content": "\ndef test_baconian_complete_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABB ABABB ABBBA ABBAA BBAAA ABBAB AAAAA ABBAA AABAA ABAAA BAABA AAAAB AABAA AABAA AAAAA ABBAB AAABB ABAAA ABABB ABAAA ABABA AABAA AAABB ABBBA AABBA AAAAA ABBAB AAABB AAAAA ABBBB ABBBB ABABB AABAA AAAAA ABBAB AAABB BAABB BAAAB AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n" + }, + { + "chunk_id": "doc_20_chunk_3", + "original_index": 3, + "content": "\ndef test_baconian_standard_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABA ABABA ABBAB ABABB BABBA ABBAA AAAAA ABABB AABAA ABAAA BAAAB AAAAB AABAA AABAA AAAAA ABBAA AAABB ABAAA ABABA ABAAA ABAAB AABAA AAABB ABBAB AABBA AAAAA ABBAA AAABB AAAAA ABBBA ABBBA ABABA AABAA AAAAA ABBAA AAABB BAABA BAAAA AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n" + }, + { + "chunk_id": "doc_20_chunk_4", + "original_index": 4, + "content": "\ndef test_base32():\n res = decrypt(\n Config().library_default().complete_config(),\n \"JBSWY3DPEBWXSIDOMFWWKIDJOMQGEZLFEBQW4ZBAJEQGY2LLMUQGI33HEBQW4ZBAMFYHA3DFEBQW4ZBAORZGKZI=\",\n )\n assert res == answer_str\n\n\ndef test_base58_bitcoin():\n res = decrypt(\n Config().library_default().complete_config(),\n \"6qYhNwsP46Mn4gy6gyANfsMm2icAxGFA6gnFjVm9phYHeby7PZm3vthiXxSU77teQgTFGbHETn\",\n )\n assert res == answer_str\n\n\ndef test_base58_ripple():\n res = decrypt(\n Config().library_default().complete_config(),\n \"aqY64A1PhaM8hgyagyw4C1Mmp5cwxGEwag8EjVm9F6YHebyfPZmsvt65XxS7ffteQgTEGbHNT8\",\n )\n assert res == answer_str\n\n" + }, + { + "chunk_id": "doc_20_chunk_5", + "original_index": 5, + "content": "\ndef test_base62():\n res = decrypt(\n Config().library_default().complete_config(),\n \"2mQvnz9Yevvb7DRCuyDltsP31vJLToR5pjE9orWkzHMUsht2kbC96PLbZ1sdIocsGHENrzC2n\",\n )\n assert res == answer_str\n\n\ndef test_base64():\n res = decrypt(\n Config().library_default().complete_config(),\n \"SGVsbG8gbXkgbmFtZSBpcyBiZWUgYW5kIEkgbGlrZSBkb2cgYW5kIGFwcGxlIGFuZCB0cmVl\",\n )\n\n assert res == answer_str\n\n" + }, + { + "chunk_id": "doc_20_chunk_6", + "original_index": 6, + "content": "\ndef test_base69():\n res = decrypt(\n Config().library_default().complete_config(),\n \"kAZAtABBeB8A-AoB8ADBNAhBLA1AFBgA0AXBfBGATAVAFBgAwAWBHBu7ARt\",\n )\n assert res == answer_str\n\n\ndef test_base91():\n res = decrypt(\n Config().library_default().complete_config(),\n 
\">OwJh>=/fV@$x88j9ZNKB*ge$yV%lE%ZKi,+<]>-.-[+>-----<]>++.+++++++..+++.+[+>++<]>.[++>+<]>---.--[+++>-<]>.-[+>++++<]>.[++>+<]>--.-[+++>++<]>-.+[-->---<]>.--------.[+++++>+<]>+.-[+++>--<]>-.++++++++++.---[+>++<]>.[+++>-<]>++.+++..[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.-[+++>+<]>--.-[+>----<]>.++[+++>--<]>.---.++.------.[+++++>+<]>+.+[+>---<]>+.+++++++++++.--------.-[+++>-<]>--.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[+++>-<]>+.-[-->---<]>..----.-------.[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[++>+<]>++++.--.-------------..\",\n )\n assert res == answer_str\n\n" + }, + { + "chunk_id": "doc_20_chunk_11", + "original_index": 11, + "content": "\ndef test_brandon():\n res = decrypt(\n Config().library_default().complete_config(),\n \"R hvv blf tzgsvi yvuliv nv...sfmtib...gviirurvw... Xofgxsrmt blfi yzyvh gl blfi yivzhg. Vnkvili Vnsbi srh nzixsvw srh ovtrlmh rmgl lfi ozmwh... Ozrw hrvtv gl vevib uligivhh uiln sviv gl gsv Yofv Nlfmgzrmh. Izyrw zmw izevmlfh, sv yrgvh zmw yrgvh zdzb. Nvm lu gsv Mligs, blf hgzmw zg gsv kivxrkrxv. Blfi prmth szev uzrovw blf, hl mld blf gfim gl gsv tlwh! Zmw bvg blf wl mlg kovzw? Blf wl mlg pmvvo gl wfhg blfi svzwh drgs zhs? Rmhgvzw blf dzro, Dsb szev gsv tlwh ulihzpvm fh? Dv nfhg ollp rmgl gsv girzoh dv uzrovw olmt ztl! Rm z grnv kzhhvw, " + }, + { + "chunk_id": "doc_20_chunk_12", + "original_index": 12, + "content": "lfi dliow rmgvigdrmvw drgs zmlgsvi gsilfts zm fksvzezo hxslozih xzoo gsv Xlmqfmxgrlm lu gsv Hksvivh... Gsv tlwh zooldvw fmslob ulixvh gl hork rmgl lfi wlnzrm. Gsv luuhkirmt lu gszg xzgzxobhn dzh gsv mvuvirlfh ulixv xzoovw nztrx... Bvg dv wrw mlg yzmrhs rg, rmhgvzw hgfwbrmt gsv erov zixzmv uli lfi kldvi zmw dvzogs! Zmw gsv nlmhgvih zg lfi wlli...gsv fmslob ivorxgh lu gsrh Xlmqfmxgrlm? ...gsv gilooh...gsv xlikhv vzgvih...gsv dvivdloevh? Wrw dv izrhv lfi hdliwh ztzrmhg gsvn? Li szev dv ozrw gsrh yfiwvm lm lgsvih? Lm hl-xzoovw drgxsvih? Hgizb xsrowivm gzftsg gsv dzbh lu ulfo hlixvib, " + }, + { + "chunk_id": "doc_20_chunk_13", + "original_index": 13, + "content": "gsvri ylwrvh nfgzgvw gsilfts yozhksvnlfh irgfzo. Hvmg gl urtsg nlmhgvih gslfts gsvb xlfow mlg wrhgrmtfrhs tllw uiln vero. Gsv uorxpvi lu sfnzmrgb olmt vcgrmtfrhsvw drgsrm gsvn. Bvh, gsvri mfnyvih szev wdrmwovw gsilfts gsv bvzih. Yfg z uvd hgroo ilzn lfi ozmwh, luuvirmt gsvri yollwb dlip uli xlrm. Gl gsrh wzb gsvb hsznv fh drgs gsvri evib vcrhgvmxv! Gsv Mligs yovvwh, uolttvw yb dzi. Gsv yzggovh ziv gsv tlwh' dsrk, xszhgrhvnvmg uli lfi hrmh! Zmw ovg fh mlg ulitvg gsv gviilih, gsv hxlfitvh uiln yvblmw lfi dliow! Gsv Drow Sfmg irwvh gsv hpb drgs vevib ufoo nllm! Gsv wzip izrwvih zywfxg lfi xsrowivm rmgl ozmwh fmpmldm! Hlnv hzb gsvb svizow z hvxlmw Xlmqfmxgrlm! Xzm dv xszig z xlfihv yzxp rmgl gsv ortsg? Droo dv urmw gsv hgivmtgs gl yzmrhs gsv nztvh uiln lfi prmtwlnh? Fmrgv zilfmw gsv dzings lu gsv Vgvimzo Uriv? Mrts rh gsv Grnv lu gsv Hdliw zmw gsv Zcv! Mlmv droo urtsg gsrh dzi rm lfi hgvzw! 
Mrts rh gsv Grnv lu Nzwmvhh zmw Wrhwzrm!\",\n )\n assert bool(res) is True\n\n" + }, + { + "chunk_id": "doc_20_chunk_14", + "original_index": 14, + "content": "\ndef test_caesar():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Uryyb zl anzr vf orr naq V yvxr qbt naq nccyr naq gerr\",\n )\n assert res == answer_str\n\n\ndef test_decimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"72 101 108 108 111 32 109 121 32 110 97 109 101 32 105 115 32 98 101 101 32 97 110 100 32 73 32 108 105 107 101 32 100 111 103 32 97 110 100 32 97 112 112 108 101 32 97 110 100 32 116 114 101 101\",\n )\n assert res == answer_str\n\n\ndef test_dna():\n res = decrypt(\n Config().library_default().complete_config(),\n \"GAT AAT GCT ATT TCT ATT AAT ACT GAA CGT GAA TCT ACT ATT AAT GGT\",\n )\n assert res == \"DNAISINTERESTING\"\n\n\ndef test_dtmf():\n res = decrypt(\n Config().library_default().complete_config(),\n" + }, + { + "chunk_id": "doc_20_chunk_15", + "original_index": 15, + "content": " \"1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1209-697 
1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697\"," + }, + { + "chunk_id": "doc_20_chunk_16", + "original_index": 16, + "content": "\n )\n assert res == answer_str\n\n\ndef test_galactic():\n res = decrypt(\n Config().library_default().complete_config(),\n \"\u2351\u14b7\ua58e\ua58e\ud835\ude79 \u14b2|| \u30ea\u1511\u14b2\u14b7 \u254e\u14ed \u0296\u14b7\u14b7 \u1511\u30ea\u21b8 i \ua58e\u254e\ua58c\u14b7 \u21b8\ud835\ude79\u22a3 \u1511\u30ea\u21b8 \u1511!\u00a1!\u00a1\ua58e\u14b7 \u1511\u30ea\u21b8 \u2138 \u0323 \u2237\u14b7\u14b7\",\n )\n assert res == answer_str.lower()\n\n\ndef test_galactic_Xproblem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"\u2351\u1511\ua58e\u254e\u2393\u1511 \u0307/, \u0307/||\ua58e\ud835\ude79!\u00a1\u2351\ud835\ude79\u30ea\u14b7, \u1511 \u0307/ \u1511\ua58e\ud835\ude79\u30ea\u14b7 \u1511\u30ea\u21b8 \u0307/\u14b7\u2237\ud835\ude79 \u0307/ \u2393\u2237\ud835\ude79\u14b2 \ud835\ude79 \u0307/\u2393\ud835\ude79\u2237\u21b8\",\n )\n assert res == \"halifax, xylophone, a x alone and xerox from oxford\"\n\n\ndef test_gzip():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H4sIAAzul18A/yXJzQmAMBSEwVa+ckwZT7LIw80P6sXuA3ocZpM9aC89msibXSJ6peA8RR3Hx5jTfzyXtAAbQvCyNgAAAA==\",\n )\n assert res == answer_str\n\n" + }, + { + "chunk_id": "doc_20_chunk_17", + "original_index": 17, + "content": "\ndef test_hexadecimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"48 65 6c 6c 6f 20 6d 79 20 6e 61 6d 65 20 69 73 20 62 65 65 20 61 6e 64 20 49 20 6c 69 6b 65 20 64 6f 67 20 61 6e 64 20 61 70 70 6c 65 20 61 6e 64 20 74 72 65 65\",\n )\n\n assert res == answer_str\n\n\ndef test_json_problem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"0110100001100101011011000110110001101111\",\n )\n assert res != \"0110100001100101011011000110110001101111\"\n\n\ndef test_leetspeak():\n res = decrypt(\n Config().library_default().complete_config(),\n \"|-|3ll0 my n4m3 1s 833 4nd 1 l1k3 D06 4ND 4ppl3 4nd 7R33\",\n )\n assert res.lower() == answer_str.lower()\n\n" + }, + { + "chunk_id": "doc_20_chunk_18", + "original_index": 18, + "content": "\ndef test_morse_code():\n res = decrypt(\n 
Config().library_default().complete_config(),\n \".... . .-.. .-.. ---/-- -.--/-. .- -- ./.. .../-... . ./.- -. -../../.-.. .. -.- ./-.. --- --./.- -. -../.- .--. .--. .-.. ./.- -. -../- .-. . .\",\n )\n assert res == answer_str.upper()\n\n\ndef test_multi_tap():\n res = decrypt(\n Config().library_default().complete_config(),\n \"44 33 555 555 666 0 6 999 0 66 2 6 33 0 444 7777 0 22 33 33 0 2 66 3 0 444 0 555 444 55 33 0 3 666 4 0 2 66 3 0 2 7 7 555 33 0 2 66 3 0 8 777 33 33\",\n )\n assert res == answer_str.upper()\n\n" + }, + { + "chunk_id": "doc_20_chunk_19", + "original_index": 19, + "content": "\ndef test_new_line_at_start_returns():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"\\npass\\n\")\n\n assert res == \"\\npass\\n\"\n\n\ndef test_new_line_strip_and_return():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"pass\\n\")\n\n assert res == \"pass\\n\"\n\n\ndef test_octal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"110 145 154 154 157 40 155 171 40 156 141 155 145 40 151 163 40 142 145 145 40 141 156 144 40 111 40 154 151 153 145 40 144 157 147 40 141 156 144 40 141 160 160 154 145 40 141 156 144 40 164 162 145 145\",\n )\n assert res == answer_str\n\n" + }, + { + "chunk_id": "doc_20_chunk_20", + "original_index": 20, + "content": "\ndef test_plaintext():\n res = decrypt(Config().library_default().complete_config(), answer_str)\n assert res == answer_str\n\n\ndef test_quadgrams_messed_up_spacing():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\",\n )\n assert (\n res == \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\"\n )\n\n\ndef test_quadgrams_no_spaces():\n res = decrypt(\n Config().library_default().complete_config(),\n \"HellomynameisbeeandIlikedogandappleandtree\",\n )\n assert res == \"HellomynameisbeeandIlikedogandappleandtree\"\n\n" + }, + { + "chunk_id": "doc_20_chunk_21", + "original_index": 21, + "content": "\ndef test_quadgrams_space_between_every_letter():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\",\n )\n assert (\n res\n == \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\"\n )\n\n\ndef test_reversed_text():\n res = decrypt(\n Config().library_default().complete_config(),\n \"eert dna elppa dna god ekil I dna eeb si eman ym olleH\",\n )\n assert res == answer_str\n\n" + }, + { + "chunk_id": "doc_20_chunk_22", + "original_index": 22, + "content": "\ndef test_rot47():\n res = decrypt(\n Config().library_default().complete_config(),\n \"$A9:?I @7 3=24< BF2CEK[ ;F586 >J G@H\",\n )\n assert res == \"Sphinx of black quartz, judge my vow\"\n\n\ndef test_soundex():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H236 I200 I500 T000 P230\",\n )\n assert res.lower() == \"history is in the past\"\n\n\ndef test_tap_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \"4,4 1,5 4,3 4,4 3,4 3,3 1,5 4,4 5,2 3,4 4,4 2,3 4,2 1,5 1,5\",\n )\n assert res == \"test one two three\".upper()\n\n\ndef test_url():\n res = decrypt(\n 
Config().library_default().complete_config(),\n \"https%3A%2F%2Fwww%2Egoogle%2Ecom%2Fsearch%3Fq%3Dciphey\",\n )\n assert res == \"https://www.google.com/search?q=ciphey\"\n\n" + }, + { + "chunk_id": "doc_20_chunk_23", + "original_index": 23, + "content": "\ndef test_uuencode():\n res = decrypt(\n Config().library_default().complete_config(),\n 'begin 644 /dev/stdout\\nM2&5L;&\\\\@;7D@;F%M92!I\n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\nstd::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. 
\"\n };\n}\n\nstd::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\nstd::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n result.reserve(vals.size());\n\n std::transform(vals.begin(), vals.end(), std::back_inserter(result), [scale_multiplier, rhs_value](const auto& value) {\n return value * scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\nstd::vector MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n", + "chunks": [ + { + "chunk_id": "doc_21_chunk_0", + "original_index": 0, + "content": "#include \"value_generators.h\"\n\n#include \n#include 
\n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\n" + }, + { + "chunk_id": "doc_21_chunk_1", + "original_index": 1, + "content": "std::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n" + }, + { + "chunk_id": "doc_21_chunk_2", + "original_index": 2, + "content": " \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. 
\"\n };\n}\n\n" + }, + { + "chunk_id": "doc_21_chunk_3", + "original_index": 3, + "content": "std::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n" + }, + { + "chunk_id": "doc_21_chunk_4", + "original_index": 4, + "content": " // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n" + }, + { + "chunk_id": "doc_21_chunk_5", + "original_index": 5, + "content": " // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\n" + }, + { + "chunk_id": "doc_21_chunk_6", + "original_index": 6, + "content": "std::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n result.reserve(vals.size());\n\n" + }, + { + "chunk_id": "doc_21_chunk_7", + "original_index": 7, + "content": " std::transform(vals.begin(), vals.end(), std::back_inserter(result), [scale_multiplier, rhs_value](const auto& value) {\n return value * scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\n" + }, + { + "chunk_id": "doc_21_chunk_8", + "original_index": 8, + "content": "std::vector MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n 
MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n" + } + ] + }, + { + "doc_id": "doc_22", + "original_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", + "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto 
Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\nprivate:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", + "chunks": [ + { + "chunk_id": "doc_22_chunk_0", + "original_index": 0, + "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n" + }, + { + "chunk_id": "doc_22_chunk_1", + "original_index": 1, + "content": " /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n" + }, 
+ { + "chunk_id": "doc_22_chunk_2", + "original_index": 2, + "content": "private:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n" + }, + { + "chunk_id": "doc_22_chunk_3", + "original_index": 3, + "content": " ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n" + }, + { + "chunk_id": "doc_22_chunk_4", + "original_index": 4, + "content": " /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n" + }, + { + "chunk_id": "doc_22_chunk_5", + "original_index": 5, + "content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\n" + }, + { + "chunk_id": "doc_22_chunk_6", + "original_index": 6, + "content": "private:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n" + }, + { + "chunk_id": "doc_22_chunk_7", + "original_index": 7, + "content": " template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n" + }, + { + "chunk_id": "doc_22_chunk_8", + "original_index": 8, + "content": " template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else 
{\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n" + }, + { + "chunk_id": "doc_22_chunk_9", + "original_index": 9, + "content": " template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n" + } + ] + }, + { + "doc_id": "doc_23", + "original_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", + "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", + "chunks": [ + { + "chunk_id": "doc_23_chunk_0", + "original_index": 0, + "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n" + }, + { + "chunk_id": "doc_23_chunk_1", + "original_index": 1, + "content": " /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// 
Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n" + }, + { + "chunk_id": "doc_23_chunk_2", + "original_index": 2, + "content": " /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n" + } + ] + }, + { + "doc_id": "doc_24", + "original_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", + "content": "#include \"type_parser.h\"\n\n#include \"clickhouse/exceptions.h\"\n#include \"clickhouse/base/platform.h\" // for _win_\n\n#include \n#include \n#include \n#include \n#include \n\n#if defined _win_\n#include \n#else\n#include \n#endif\n\n\nnamespace clickhouse {\n\nbool TypeAst::operator==(const TypeAst & other) const {\n return meta == other.meta\n && code == other.code\n && name == other.name\n && value == other.value\n && std::equal(elements.begin(), elements.end(), other.elements.begin(), other.elements.end());\n}\n\nstatic const std::unordered_map kTypeCode = {\n { \"Void\", Type::Void },\n { \"Int8\", Type::Int8 },\n { \"Int16\", Type::Int16 },\n { \"Int32\", Type::Int32 },\n { \"Int64\", Type::Int64 },\n { \"Bool\", Type::UInt8 },\n { \"UInt8\", Type::UInt8 },\n { \"UInt16\", Type::UInt16 },\n { \"UInt32\", Type::UInt32 },\n { \"UInt64\", Type::UInt64 },\n { \"Float32\", Type::Float32 },\n { \"Float64\", Type::Float64 },\n { \"String\", Type::String },\n { \"FixedString\", Type::FixedString },\n { \"DateTime\", Type::DateTime },\n { \"DateTime64\", Type::DateTime64 },\n { \"Date\", Type::Date },\n { \"Date32\", Type::Date32 },\n { \"Array\", Type::Array },\n { \"Nullable\", Type::Nullable },\n { \"Tuple\", Type::Tuple },\n { \"Enum8\", Type::Enum8 },\n { \"Enum16\", Type::Enum16 },\n { \"UUID\", Type::UUID },\n { \"IPv4\", Type::IPv4 },\n { \"IPv6\", Type::IPv6 },\n { \"Int128\", Type::Int128 },\n// { \"UInt128\", Type::UInt128 },\n { \"Decimal\", Type::Decimal },\n { \"Decimal32\", Type::Decimal32 },\n { \"Decimal64\", Type::Decimal64 },\n { \"Decimal128\", Type::Decimal128 },\n { \"LowCardinality\", Type::LowCardinality },\n { \"Map\", Type::Map },\n { \"Point\", Type::Point },\n { \"Ring\", Type::Ring },\n { \"Polygon\", Type::Polygon },\n { \"MultiPolygon\", Type::MultiPolygon },\n};\n\ntemplate \ninline int CompateStringsCaseInsensitive(const L& left, const R& right) {\n int64_t size_diff = left.size() - right.size();\n if (size_diff != 0)\n return size_diff > 0 ? 
1 : -1;\n\n#if defined _win_\n return _strnicmp(left.data(), right.data(), left.size());\n#else\n return strncasecmp(left.data(), right.data(), left.size());\n#endif\n}\n\nstatic Type::Code GetTypeCode(const std::string& name) {\n auto it = kTypeCode.find(name);\n if (it != kTypeCode.end()) {\n return it->second;\n }\n\n return Type::Void;\n}\n\nstatic TypeAst::Meta GetTypeMeta(const StringView& name) {\n if (name == \"Array\") {\n return TypeAst::Array;\n }\n\n if (name == \"Null\") {\n return TypeAst::Null;\n }\n\n if (name == \"Nullable\") {\n return TypeAst::Nullable;\n }\n\n if (name == \"Tuple\") {\n return TypeAst::Tuple;\n }\n\n if (name == \"Enum8\" || name == \"Enum16\") {\n return TypeAst::Enum;\n }\n\n if (name == \"LowCardinality\") {\n return TypeAst::LowCardinality;\n }\n\n if (name == \"SimpleAggregateFunction\") {\n return TypeAst::SimpleAggregateFunction;\n }\n\n if (name == \"Map\") {\n return TypeAst::Map;\n }\n\n return TypeAst::Terminal;\n}\n\nbool ValidateAST(const TypeAst& ast) {\n // Void terminal that is not actually \"void\" produced when unknown type is encountered.\n if (ast.meta == TypeAst::Terminal\n && ast.code == Type::Void\n && CompateStringsCaseInsensitive(ast.name, std::string_view(\"void\")) != 0)\n //throw UnimplementedError(\"Unsupported type: \" + ast.name);\n return false;\n\n return true;\n}\n\n\nTypeParser::TypeParser(const StringView& name)\n : cur_(name.data())\n , end_(name.data() + name.size())\n , type_(nullptr)\n{\n}\n\nTypeParser::~TypeParser() = default;\n\nbool TypeParser::Parse(TypeAst* type) {\n type_ = type;\n open_elements_.push(type_);\n\n size_t processed_tokens = 0;\n do {\n const Token & token = NextToken();\n switch (token.type) {\n case Token::QuotedString:\n {\n type_->meta = TypeAst::Terminal;\n if (token.value.length() < 1)\n type_->value_string = {};\n else\n type_->value_string = token.value.substr(1, token.value.length() - 2).to_string();\n type_->code = Type::String;\n break;\n }\n case Token::Name:\n type_->meta = GetTypeMeta(token.value);\n type_->name = token.value.to_string();\n type_->code = GetTypeCode(type_->name);\n break;\n case Token::Number:\n type_->meta = TypeAst::Number;\n type_->value = std::stol(token.value.to_string());\n break;\n case Token::String:\n type_->meta = TypeAst::String;\n type_->value_string = std::string(token.value);\n break;\n case Token::LPar:\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::RPar:\n type_ = open_elements_.top();\n open_elements_.pop();\n break;\n case Token::Assign:\n case Token::Comma:\n type_ = open_elements_.top();\n open_elements_.pop();\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::EOS:\n {\n // Ubalanced braces, brackets, etc is an error.\n if (open_elements_.size() != 1)\n return false;\n\n // Empty input string, no tokens produced\n if (processed_tokens == 0)\n return false;\n\n return ValidateAST(*type);\n }\n case Token::Invalid:\n return false;\n }\n ++processed_tokens;\n } while (true);\n}\n\nTypeParser::Token TypeParser::NextToken() {\n for (; cur_ < end_; ++cur_) {\n switch (*cur_) {\n case ' ':\n case '\\n':\n case '\\t':\n case '\\0':\n continue;\n case '=':\n return Token{Token::Assign, StringView(cur_++, 1)};\n case '(':\n return Token{Token::LPar, StringView(cur_++, 1)};\n case ')':\n return Token{Token::RPar, StringView(cur_++, 1)};\n case ',':\n return Token{Token::Comma, 
StringView(cur_++, 1)};\n case '\\'':\n {\n const auto end_quote_length = 1;\n const StringView end_quote{cur_, end_quote_length};\n // Fast forward to the closing quote.\n const auto start = cur_++;\n for (; cur_ < end_ - end_quote_length; ++cur_) {\n // TODO (nemkov): handle escaping ?\n if (end_quote == StringView{cur_, end_quote_length}) {\n cur_ += end_quote_length;\n\n return Token{Token::QuotedString, StringView{start, cur_}};\n }\n }\n return Token{Token::QuotedString, StringView(cur_++, 1)};\n }\n\n default: {\n const char* st = cur_;\n\n if (*cur_ == '\\'') {\n for (st = ++cur_; cur_ < end_; ++cur_) {\n if (*cur_ == '\\'') {\n return Token{Token::String, StringView(st, cur_++ - st)};\n }\n }\n\n return Token{Token::Invalid, StringView()};\n }\n\n if (isalpha(*cur_) || *cur_ == '_') {\n for (; cur_ < end_; ++cur_) {\n if (!isalpha(*cur_) && !isdigit(*cur_) && *cur_ != '_') {\n break;\n }\n }\n\n return Token{Token::Name, StringView(st, cur_)};\n }\n\n if (isdigit(*cur_) || *cur_ == '-') {\n for (++cur_; cur_ < end_; ++cur_) {\n if (!isdigit(*cur_)) {\n break;\n }\n }\n\n return Token{Token::Number, StringView(st, cur_)};\n }\n\n return Token{Token::Invalid, StringView()};\n }\n }\n }\n\n return Token{Token::EOS, StringView()};\n}\n\n\nconst TypeAst* ParseTypeName(const std::string& type_name) {\n // Cache for type_name.\n // Usually we won't have too many type names in the cache, so do not try to\n // limit cache size.\n static std::map ast_cache;\n static std::mutex lock;\n\n std::lock_guard guard(lock);\n auto it = ast_cache.find(type_name);\n if (it != ast_cache.end()) {\n return &it->second;\n }\n\n auto& ast = ast_cache[type_name];\n if (TypeParser(type_name).Parse(&ast)) {\n return *\n }\n ast_cache.erase(type_name);\n return nullptr;\n}\n\n}\n", + "chunks": [ + { + "chunk_id": "doc_24_chunk_0", + "original_index": 0, + "content": "#include \"type_parser.h\"\n\n#include \"clickhouse/exceptions.h\"\n#include \"clickhouse/base/platform.h\" // for _win_\n\n#include \n#include \n#include \n#include \n#include \n\n#if defined _win_\n#include \n#else\n#include \n#endif\n\n\nnamespace clickhouse {\n\nbool TypeAst::operator==(const TypeAst & other) const {\n return meta == other.meta\n && code == other.code\n && name == other.name\n && value == other.value\n && std::equal(elements.begin(), elements.end(), other.elements.begin(), other.elements.end());\n}\n\n" + }, + { + "chunk_id": "doc_24_chunk_1", + "original_index": 1, + "content": "static const std::unordered_map kTypeCode = {\n { \"Void\", Type::Void },\n { \"Int8\", Type::Int8 },\n { \"Int16\", Type::Int16 },\n { \"Int32\", Type::Int32 },\n { \"Int64\", Type::Int64 },\n { \"Bool\", Type::UInt8 },\n { \"UInt8\", Type::UInt8 },\n { \"UInt16\", Type::UInt16 },\n { \"UInt32\", Type::UInt32 },\n { \"UInt64\", Type::UInt64 },\n { \"Float32\", Type::Float32 },\n { \"Float64\", Type::Float64 },\n { \"String\", Type::String },\n { \"FixedString\", Type::FixedString },\n { \"DateTime\", Type::DateTime },\n { \"DateTime64\", Type::DateTime64 },\n" + }, + { + "chunk_id": "doc_24_chunk_2", + "original_index": 2, + "content": " { \"Date\", Type::Date },\n { \"Date32\", Type::Date32 },\n { \"Array\", Type::Array },\n { \"Nullable\", Type::Nullable },\n { \"Tuple\", Type::Tuple },\n { \"Enum8\", Type::Enum8 },\n { \"Enum16\", Type::Enum16 },\n { \"UUID\", Type::UUID },\n { \"IPv4\", Type::IPv4 },\n { \"IPv6\", Type::IPv6 },\n { \"Int128\", Type::Int128 },\n// { \"UInt128\", Type::UInt128 },\n { \"Decimal\", Type::Decimal },\n { \"Decimal32\", 
Type::Decimal32 },\n { \"Decimal64\", Type::Decimal64 },\n { \"Decimal128\", Type::Decimal128 },\n { \"LowCardinality\", Type::LowCardinality },\n { \"Map\", Type::Map },\n { \"Point\", Type::Point },\n { \"Ring\", Type::Ring },\n { \"Polygon\", Type::Polygon },\n { \"MultiPolygon\", Type::MultiPolygon },\n};\n\n" + }, + { + "chunk_id": "doc_24_chunk_3", + "original_index": 3, + "content": "template \ninline int CompateStringsCaseInsensitive(const L& left, const R& right) {\n int64_t size_diff = left.size() - right.size();\n if (size_diff != 0)\n return size_diff > 0 ? 1 : -1;\n\n#if defined _win_\n return _strnicmp(left.data(), right.data(), left.size());\n#else\n return strncasecmp(left.data(), right.data(), left.size());\n#endif\n}\n\nstatic Type::Code GetTypeCode(const std::string& name) {\n auto it = kTypeCode.find(name);\n if (it != kTypeCode.end()) {\n return it->second;\n }\n\n return Type::Void;\n}\n\nstatic TypeAst::Meta GetTypeMeta(const StringView& name) {\n if (name == \"Array\") {\n return TypeAst::Array;\n }\n\n" + }, + { + "chunk_id": "doc_24_chunk_4", + "original_index": 4, + "content": " if (name == \"Null\") {\n return TypeAst::Null;\n }\n\n if (name == \"Nullable\") {\n return TypeAst::Nullable;\n }\n\n if (name == \"Tuple\") {\n return TypeAst::Tuple;\n }\n\n if (name == \"Enum8\" || name == \"Enum16\") {\n return TypeAst::Enum;\n }\n\n if (name == \"LowCardinality\") {\n return TypeAst::LowCardinality;\n }\n\n if (name == \"SimpleAggregateFunction\") {\n return TypeAst::SimpleAggregateFunction;\n }\n\n if (name == \"Map\") {\n return TypeAst::Map;\n }\n\n return TypeAst::Terminal;\n}\n\n" + }, + { + "chunk_id": "doc_24_chunk_5", + "original_index": 5, + "content": "bool ValidateAST(const TypeAst& ast) {\n // Void terminal that is not actually \"void\" produced when unknown type is encountered.\n if (ast.meta == TypeAst::Terminal\n && ast.code == Type::Void\n && CompateStringsCaseInsensitive(ast.name, std::string_view(\"void\")) != 0)\n //throw UnimplementedError(\"Unsupported type: \" + ast.name);\n return false;\n\n return true;\n}\n\n\nTypeParser::TypeParser(const StringView& name)\n : cur_(name.data())\n , end_(name.data() + name.size())\n , type_(nullptr)\n{\n}\n\nTypeParser::~TypeParser() = default;\n\nbool TypeParser::Parse(TypeAst* type) {\n type_ = type;\n open_elements_.push(type_);\n\n" + }, + { + "chunk_id": "doc_24_chunk_6", + "original_index": 6, + "content": " size_t processed_tokens = 0;\n do {\n const Token & token = NextToken();\n switch (token.type) {\n case Token::QuotedString:\n {\n type_->meta = TypeAst::Terminal;\n if (token.value.length() < 1)\n type_->value_string = {};\n else\n type_->value_string = token.value.substr(1, token.value.length() - 2).to_string();\n" + }, + { + "chunk_id": "doc_24_chunk_7", + "original_index": 7, + "content": " type_->code = Type::String;\n break;\n }\n case Token::Name:\n type_->meta = GetTypeMeta(token.value);\n type_->name = token.value.to_string();\n type_->code = GetTypeCode(type_->name);\n break;\n case Token::Number:\n type_->meta = TypeAst::Number;\n type_->value = std::stol(token.value.to_string());\n break;\n case Token::String:\n type_->meta = TypeAst::String;\n type_->value_string = std::string(token.value);\n" + }, + { + "chunk_id": "doc_24_chunk_8", + "original_index": 8, + "content": " break;\n case Token::LPar:\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::RPar:\n type_ = open_elements_.top();\n 
open_elements_.pop();\n break;\n case Token::Assign:\n case Token::Comma:\n type_ = open_elements_.top();\n open_elements_.pop();\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::EOS:\n {\n // Ubalanced braces, brackets, etc is an error.\n if (open_elements_.size() != 1)\n return false;\n\n" + }, + { + "chunk_id": "doc_24_chunk_9", + "original_index": 9, + "content": " // Empty input string, no tokens produced\n if (processed_tokens == 0)\n return false;\n\n return ValidateAST(*type);\n }\n case Token::Invalid:\n return false;\n }\n ++processed_tokens;\n } while (true);\n}\n\nTypeParser::Token TypeParser::NextToken() {\n for (; cur_ < end_; ++cur_) {\n switch (*cur_) {\n case ' ':\n case '\\n':\n case '\\t':\n case '\\0':\n continue;\n case '=':\n return Token{Token::Assign, StringView(cur_++, 1)};\n case '(':\n return Token{Token::LPar, StringView(cur_++, 1)};\n case ')':\n return Token{Token::RPar, StringView(cur_++, 1)};\n" + }, + { + "chunk_id": "doc_24_chunk_10", + "original_index": 10, + "content": " case ',':\n return Token{Token::Comma, StringView(cur_++, 1)};\n case '\\'':\n {\n const auto end_quote_length = 1;\n const StringView end_quote{cur_, end_quote_length};\n // Fast forward to the closing quote.\n const auto start = cur_++;\n for (; cur_ < end_ - end_quote_length; ++cur_) {\n // TODO (nemkov): handle escaping ?\n if (end_quote == StringView{cur_, end_quote_length}) {\n cur_ += end_quote_length;\n\n" + }, + { + "chunk_id": "doc_24_chunk_11", + "original_index": 11, + "content": " return Token{Token::QuotedString, StringView{start, cur_}};\n }\n }\n return Token{Token::QuotedString, StringView(cur_++, 1)};\n }\n\n default: {\n const char* st = cur_;\n\n if (*cur_ == '\\'') {\n for (st = ++cur_; cur_ < end_; ++cur_) {\n if (*cur_ == '\\'') {\n return Token{Token::String, StringView(st, cur_++ - st)};\n }\n }\n\n" + }, + { + "chunk_id": "doc_24_chunk_12", + "original_index": 12, + "content": " return Token{Token::Invalid, StringView()};\n }\n\n if (isalpha(*cur_) || *cur_ == '_') {\n for (; cur_ < end_; ++cur_) {\n if (!isalpha(*cur_) && !isdigit(*cur_) && *cur_ != '_') {\n break;\n }\n }\n\n return Token{Token::Name, StringView(st, cur_)};\n }\n\n if (isdigit(*cur_) || *cur_ == '-') {\n for (++cur_; cur_ < end_; ++cur_) {\n if (!isdigit(*cur_)) {\n break;\n }\n }\n\n" + }, + { + "chunk_id": "doc_24_chunk_13", + "original_index": 13, + "content": " return Token{Token::Number, StringView(st, cur_)};\n }\n\n return Token{Token::Invalid, StringView()};\n }\n }\n }\n\n return Token{Token::EOS, StringView()};\n}\n\n\nconst TypeAst* ParseTypeName(const std::string& type_name) {\n // Cache for type_name.\n // Usually we won't have too many type names in the cache, so do not try to\n // limit cache size.\n static std::map ast_cache;\n static std::mutex lock;\n\n std::lock_guard guard(lock);\n auto it = ast_cache.find(type_name);\n if (it != ast_cache.end()) {\n return &it->second;\n }\n\n auto& ast = ast_cache[type_name];\n if (TypeParser(type_name).Parse(&ast)) {\n return *\n }\n ast_cache.erase(type_name);\n return nullptr;\n}\n\n}\n" + } + ] + }, + { + "doc_id": "doc_25", + "original_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", + "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n// * 
Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n// * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the public API for death tests. It is\n// #included by gtest.h so a user doesn't need to include this\n// directly.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n#define GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n\n#include \"gtest/internal/gtest-death-test-internal.h\"\n\nnamespace testing {\n\n// This flag controls the style of death tests. Valid values are \"threadsafe\",\n// meaning that the death test child process will re-execute the test binary\n// from the start, running only a single death test, or \"fast\",\n// meaning that the child process will execute the test logic immediately\n// after forking.\nGTEST_DECLARE_string_(death_test_style);\n\n#if GTEST_HAS_DEATH_TEST\n\nnamespace internal {\n\n// Returns a Boolean value indicating whether the caller is currently\n// executing in the context of the death test child process. Tools such as\n// Valgrind heap checkers may need this to modify their behavior in death\n// tests. IMPORTANT: This is an internal utility. Using it may break the\n// implementation of death tests. User code MUST NOT use it.\nGTEST_API_ bool InDeathTestChild();\n\n} // namespace internal\n\n// The following macros are useful for writing death tests.\n\n// Here's what happens when an ASSERT_DEATH* or EXPECT_DEATH* is\n// executed:\n//\n// 1. It generates a warning if there is more than one active\n// thread. This is because it's safe to fork() or clone() only\n// when there is a single thread.\n//\n// 2. The parent process clone()s a sub-process and runs the death\n// test in it; the sub-process exits with code 0 at the end of the\n// death test, if it hasn't exited already.\n//\n// 3. The parent process waits for the sub-process to terminate.\n//\n// 4. 
The parent process checks the exit code and error message of\n// the sub-process.\n//\n// Examples:\n//\n// ASSERT_DEATH(server.SendMessage(56, \"Hello\"), \"Invalid port number\");\n// for (int i = 0; i < 5; i++) {\n// EXPECT_DEATH(server.ProcessRequest(i),\n// \"Invalid request .* in ProcessRequest()\")\n// << \"Failed to die on request \" << i;\n// }\n//\n// ASSERT_EXIT(server.ExitNow(), ::testing::ExitedWithCode(0), \"Exiting\");\n//\n// bool KilledBySIGHUP(int exit_code) {\n// return WIFSIGNALED(exit_code) && WTERMSIG(exit_code) == SIGHUP;\n// }\n//\n// ASSERT_EXIT(client.HangUpServer(), KilledBySIGHUP, \"Hanging up!\");\n//\n// The final parameter to each of these macros is a matcher applied to any data\n// the sub-process wrote to stderr. For compatibility with existing tests, a\n// bare string is interpreted as a regular expression matcher.\n//\n// On the regular expressions used in death tests:\n//\n// GOOGLETEST_CM0005 DO NOT DELETE\n// On POSIX-compliant systems (*nix), we use the library,\n// which uses the POSIX extended regex syntax.\n//\n// On other platforms (e.g. Windows or Mac), we only support a simple regex\n// syntax implemented as part of Google Test. This limited\n// implementation should be enough most of the time when writing\n// death tests; though it lacks many features you can find in PCRE\n// or POSIX extended regex syntax. For example, we don't support\n// union (\"x|y\"), grouping (\"(xy)\"), brackets (\"[xy]\"), and\n// repetition count (\"x{5,7}\"), among others.\n//\n// Below is the syntax that we do support. We chose it to be a\n// subset of both PCRE and POSIX extended regex, so it's easy to\n// learn wherever you come from. In the following: 'A' denotes a\n// literal character, period (.), or a single \\\\ escape sequence;\n// 'x' and 'y' denote regular expressions; 'm' and 'n' are for\n// natural numbers.\n//\n// c matches any literal character c\n// \\\\d matches any decimal digit\n// \\\\D matches any character that's not a decimal digit\n// \\\\f matches \\f\n// \\\\n matches \\n\n// \\\\r matches \\r\n// \\\\s matches any ASCII whitespace, including \\n\n// \\\\S matches any character that's not a whitespace\n// \\\\t matches \\t\n// \\\\v matches \\v\n// \\\\w matches any letter, _, or decimal digit\n// \\\\W matches any character that \\\\w doesn't match\n// \\\\c matches any literal character c, which must be a punctuation\n// . matches any single character except \\n\n// A? matches 0 or 1 occurrences of A\n// A* matches 0 or many occurrences of A\n// A+ matches 1 or many occurrences of A\n// ^ matches the beginning of a string (not that of each line)\n// $ matches the end of a string (not that of each line)\n// xy matches x followed by y\n//\n// If you accidentally use PCRE or POSIX extended regex features\n// not implemented by us, you will get a run-time failure. In that\n// case, please try to rewrite your regular expression within the\n// above syntax.\n//\n// This implementation is *not* meant to be as highly tuned or robust\n// as a compiled regex library, but should perform well enough for a\n// death test, which already incurs significant overhead by launching\n// a child process.\n//\n// Known caveats:\n//\n// A \"threadsafe\" style death test obtains the path to the test\n// program from argv[0] and re-executes it in the sub-process. For\n// simplicity, the current implementation doesn't search the PATH\n// when launching the sub-process. 
This means that the user must\n// invoke the test program via a path that contains at least one\n// path separator (e.g. path/to/foo_test and\n// /absolute/path/to/bar_test are fine, but foo_test is not). This\n// is rarely a problem as people usually don't put the test binary\n// directory in PATH.\n//\n\n// Asserts that a given `statement` causes the program to exit, with an\n// integer exit status that satisfies `predicate`, and emitting error output\n// that matches `matcher`.\n# define ASSERT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_FATAL_FAILURE_)\n\n// Like `ASSERT_EXIT`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_NONFATAL_FAILURE_)\n\n// Asserts that a given `statement` causes the program to exit, either by\n// explicitly exiting with a nonzero exit code or being killed by a\n// signal, and emitting error output that matches `matcher`.\n# define ASSERT_DEATH(statement, matcher) \\\n ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Like `ASSERT_DEATH`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_DEATH(statement, matcher) \\\n EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Two predicate classes that can be used in {ASSERT,EXPECT}_EXIT*:\n\n// Tests that an exit code describes a normal exit with a given exit code.\nclass GTEST_API_ ExitedWithCode {\n public:\n explicit ExitedWithCode(int exit_code);\n ExitedWithCode(const ExitedWithCode&) = default;\n void operator=(const ExitedWithCode& other) = delete;\n bool operator()(int exit_status) const;\n private:\n const int exit_code_;\n};\n\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n// Tests that an exit code describes an exit due to termination by a\n// given signal.\n// GOOGLETEST_CM0006 DO NOT DELETE\nclass GTEST_API_ KilledBySignal {\n public:\n explicit KilledBySignal(int signum);\n bool operator()(int exit_status) const;\n private:\n const int signum_;\n};\n# endif // !GTEST_OS_WINDOWS\n\n// EXPECT_DEBUG_DEATH asserts that the given statements die in debug mode.\n// The death testing framework causes this to have interesting semantics,\n// since the sideeffects of the call are only visible in opt mode, and not\n// in debug mode.\n//\n// In practice, this can be used to test functions that utilize the\n// LOG(DFATAL) macro using the following style:\n//\n// int DieInDebugOr12(int* sideeffect) {\n// if (sideeffect) {\n// *sideeffect = 12;\n// }\n// LOG(DFATAL) << \"death\";\n// return 12;\n// }\n//\n// TEST(TestSuite, TestDieOr12WorksInDgbAndOpt) {\n// int sideeffect = 0;\n// // Only asserts in dbg.\n// EXPECT_DEBUG_DEATH(DieInDebugOr12(&sideeffect), \"death\");\n//\n// #ifdef NDEBUG\n// // opt-mode has sideeffect visible.\n// EXPECT_EQ(12, sideeffect);\n// #else\n// // dbg-mode no visible sideeffect.\n// EXPECT_EQ(0, sideeffect);\n// #endif\n// }\n//\n// This will assert that DieInDebugReturn12InOpt() crashes in debug\n// mode, usually due to a DCHECK or LOG(DFATAL), but returns the\n// appropriate fallback value (12 in this case) in opt mode. If you\n// need to test that a function has appropriate side-effects in opt\n// mode, include assertions against the side-effects. 
A general\n// pattern for this is:\n//\n// EXPECT_DEBUG_DEATH({\n// // Side-effects here will have an effect after this statement in\n// // opt mode, but none in debug mode.\n// EXPECT_EQ(12, DieInDebugOr12(&sideeffect));\n// }, \"death\");\n//\n# ifdef NDEBUG\n\n# define EXPECT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# else\n\n# define EXPECT_DEBUG_DEATH(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n\n# endif // NDEBUG for EXPECT_DEBUG_DEATH\n#endif // GTEST_HAS_DEATH_TEST\n\n// This macro is used for implementing macros such as\n// EXPECT_DEATH_IF_SUPPORTED and ASSERT_DEATH_IF_SUPPORTED on systems where\n// death tests are not supported. Those macros must compile on such systems\n// if and only if EXPECT_DEATH and ASSERT_DEATH compile with the same parameters\n// on systems that support death tests. This allows one to write such a macro on\n// a system that does not support death tests and be sure that it will compile\n// on a death-test supporting system. It is exposed publicly so that systems\n// that have death-tests with stricter requirements than GTEST_HAS_DEATH_TEST\n// can write their own equivalent of EXPECT_DEATH_IF_SUPPORTED and\n// ASSERT_DEATH_IF_SUPPORTED.\n//\n// Parameters:\n// statement - A statement that a macro such as EXPECT_DEATH would test\n// for program termination. This macro has to make sure this\n// statement is compiled but not executed, to ensure that\n// EXPECT_DEATH_IF_SUPPORTED compiles with a certain\n// parameter if and only if EXPECT_DEATH compiles with it.\n// regex - A regex that a macro such as EXPECT_DEATH would use to test\n// the output of statement. This parameter has to be\n// compiled but not evaluated by this macro, to ensure that\n// this macro only accepts expressions that a macro such as\n// EXPECT_DEATH would accept.\n// terminator - Must be an empty statement for EXPECT_DEATH_IF_SUPPORTED\n// and a return statement for ASSERT_DEATH_IF_SUPPORTED.\n// This ensures that ASSERT_DEATH_IF_SUPPORTED will not\n// compile inside functions where ASSERT_DEATH doesn't\n// compile.\n//\n// The branch that has an always false condition is used to ensure that\n// statement and regex are compiled (and thus syntactically correct) but\n// never executed. The unreachable code macro protects the terminator\n// statement from generating an 'unreachable code' warning in case\n// statement unconditionally returns or throws. The Message constructor at\n// the end allows the syntax of streaming additional messages into the\n// macro, for compilational compatibility with EXPECT_DEATH/ASSERT_DEATH.\n# define GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, terminator) \\\n GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n if (::testing::internal::AlwaysTrue()) { \\\n GTEST_LOG_(WARNING) \\\n << \"Death tests are not supported on this platform.\\n\" \\\n << \"Statement '\" #statement \"' cannot be verified.\"; \\\n } else if (::testing::internal::AlwaysFalse()) { \\\n ::testing::internal::RE::PartialMatch(\".*\", (regex)); \\\n GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n terminator; \\\n } else \\\n ::testing::Message()\n\n// EXPECT_DEATH_IF_SUPPORTED(statement, regex) and\n// ASSERT_DEATH_IF_SUPPORTED(statement, regex) expand to real death tests if\n// death tests are supported; otherwise they just issue a warning. 
This is\n// useful when you are combining death test assertions with normal test\n// assertions in one test.\n#if GTEST_HAS_DEATH_TEST\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n#else\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, )\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, return)\n#endif\n\n} // namespace testing\n\n#endif // GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n", + "chunks": [ + { + "chunk_id": "doc_25_chunk_0", + "original_index": 0, + "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n// * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n// * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n" + }, + { + "chunk_id": "doc_25_chunk_1", + "original_index": 1, + "content": "// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n" + }, + { + "chunk_id": "doc_25_chunk_2", + "original_index": 2, + "content": "//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the public API for death tests. It is\n// #included by gtest.h so a user doesn't need to include this\n// directly.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n#define GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n\n#include \"gtest/internal/gtest-death-test-internal.h\"\n\nnamespace testing {\n\n// This flag controls the style of death tests. Valid values are \"threadsafe\",\n// meaning that the death test child process will re-execute the test binary\n// from the start, running only a single death test, or \"fast\",\n// meaning that the child process will execute the test logic immediately\n// after forking.\nGTEST_DECLARE_string_(death_test_style);\n\n#if GTEST_HAS_DEATH_TEST\n\nnamespace internal {\n\n" + }, + { + "chunk_id": "doc_25_chunk_3", + "original_index": 3, + "content": "// Returns a Boolean value indicating whether the caller is currently\n// executing in the context of the death test child process. 
Tools such as\n// Valgrind heap checkers may need this to modify their behavior in death\n// tests. IMPORTANT: This is an internal utility. Using it may break the\n// implementation of death tests. User code MUST NOT use it.\nGTEST_API_ bool InDeathTestChild();\n\n} // namespace internal\n\n// The following macros are useful for writing death tests.\n\n// Here's what happens when an ASSERT_DEATH* or EXPECT_DEATH* is\n// executed:\n//\n// 1. It generates a warning if there is more than one active\n// thread. This is because it's safe to fork() or clone() only\n// when there is a single thread.\n//\n// 2. The parent process clone()s a sub-process and runs the death\n// test in it; the sub-process exits with code 0 at the end of the\n// death test, if it hasn't exited already.\n//\n" + }, + { + "chunk_id": "doc_25_chunk_4", + "original_index": 4, + "content": "// 3. The parent process waits for the sub-process to terminate.\n//\n// 4. The parent process checks the exit code and error message of\n// the sub-process.\n//\n// Examples:\n//\n// ASSERT_DEATH(server.SendMessage(56, \"Hello\"), \"Invalid port number\");\n// for (int i = 0; i < 5; i++) {\n// EXPECT_DEATH(server.ProcessRequest(i),\n// \"Invalid request .* in ProcessRequest()\")\n// << \"Failed to die on request \" << i;\n// }\n//\n// ASSERT_EXIT(server.ExitNow(), ::testing::ExitedWithCode(0), \"Exiting\");\n//\n// bool KilledBySIGHUP(int exit_code) {\n// return WIFSIGNALED(exit_code) && WTERMSIG(exit_code) == SIGHUP;\n// }\n//\n// ASSERT_EXIT(client.HangUpServer(), KilledBySIGHUP, \"Hanging up!\");\n//\n// The final parameter to each of these macros is a matcher applied to any data\n" + }, + { + "chunk_id": "doc_25_chunk_5", + "original_index": 5, + "content": "// the sub-process wrote to stderr. For compatibility with existing tests, a\n// bare string is interpreted as a regular expression matcher.\n//\n// On the regular expressions used in death tests:\n//\n// GOOGLETEST_CM0005 DO NOT DELETE\n// On POSIX-compliant systems (*nix), we use the library,\n// which uses the POSIX extended regex syntax.\n//\n// On other platforms (e.g. Windows or Mac), we only support a simple regex\n// syntax implemented as part of Google Test. This limited\n// implementation should be enough most of the time when writing\n// death tests; though it lacks many features you can find in PCRE\n// or POSIX extended regex syntax. For example, we don't support\n" + }, + { + "chunk_id": "doc_25_chunk_6", + "original_index": 6, + "content": "// union (\"x|y\"), grouping (\"(xy)\"), brackets (\"[xy]\"), and\n// repetition count (\"x{5,7}\"), among others.\n//\n// Below is the syntax that we do support. We chose it to be a\n// subset of both PCRE and POSIX extended regex, so it's easy to\n// learn wherever you come from. 
In the following: 'A' denotes a\n// literal character, period (.), or a single \\\\ escape sequence;\n// 'x' and 'y' denote regular expressions; 'm' and 'n' are for\n" + }, + { + "chunk_id": "doc_25_chunk_7", + "original_index": 7, + "content": "// natural numbers.\n//\n// c matches any literal character c\n// \\\\d matches any decimal digit\n// \\\\D matches any character that's not a decimal digit\n// \\\\f matches \\f\n// \\\\n matches \\n\n// \\\\r matches \\r\n// \\\\s matches any ASCII whitespace, including \\n\n// \\\\S matches any character that's not a whitespace\n// \\\\t matches \\t\n// \\\\v matches \\v\n// \\\\w matches any letter, _, or decimal digit\n// \\\\W matches any character that \\\\w doesn't match\n// \\\\c matches any literal character c, which must be a punctuation\n// . matches any single character except \\n\n" + }, + { + "chunk_id": "doc_25_chunk_8", + "original_index": 8, + "content": "// A? matches 0 or 1 occurrences of A\n// A* matches 0 or many occurrences of A\n// A+ matches 1 or many occurrences of A\n// ^ matches the beginning of a string (not that of each line)\n// $ matches the end of a string (not that of each line)\n// xy matches x followed by y\n//\n// If you accidentally use PCRE or POSIX extended regex features\n// not implemented by us, you will get a run-time failure. In that\n" + }, + { + "chunk_id": "doc_25_chunk_9", + "original_index": 9, + "content": "// case, please try to rewrite your regular expression within the\n// above syntax.\n//\n// This implementation is *not* meant to be as highly tuned or robust\n// as a compiled regex library, but should perform well enough for a\n// death test, which already incurs significant overhead by launching\n// a child process.\n//\n// Known caveats:\n//\n// A \"threadsafe\" style death test obtains the path to the test\n// program from argv[0] and re-executes it in the sub-process. For\n// simplicity, the current implementation doesn't search the PATH\n// when launching the sub-process. This means that the user must\n// invoke the test program via a path that contains at least one\n// path separator (e.g. path/to/foo_test and\n// /absolute/path/to/bar_test are fine, but foo_test is not). 
This\n// is rarely a problem as people usually don't put the test binary\n// directory in PATH.\n//\n\n" + }, + { + "chunk_id": "doc_25_chunk_10", + "original_index": 10, + "content": "// Asserts that a given `statement` causes the program to exit, with an\n// integer exit status that satisfies `predicate`, and emitting error output\n// that matches `matcher`.\n# define ASSERT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_FATAL_FAILURE_)\n\n// Like `ASSERT_EXIT`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_NONFATAL_FAILURE_)\n\n" + }, + { + "chunk_id": "doc_25_chunk_11", + "original_index": 11, + "content": "// Asserts that a given `statement` causes the program to exit, either by\n// explicitly exiting with a nonzero exit code or being killed by a\n// signal, and emitting error output that matches `matcher`.\n# define ASSERT_DEATH(statement, matcher) \\\n ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Like `ASSERT_DEATH`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_DEATH(statement, matcher) \\\n EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Two predicate classes that can be used in {ASSERT,EXPECT}_EXIT*:\n\n" + }, + { + "chunk_id": "doc_25_chunk_12", + "original_index": 12, + "content": "// Tests that an exit code describes a normal exit with a given exit code.\nclass GTEST_API_ ExitedWithCode {\n public:\n explicit ExitedWithCode(int exit_code);\n ExitedWithCode(const ExitedWithCode&) = default;\n void operator=(const ExitedWithCode& other) = delete;\n bool operator()(int exit_status) const;\n private:\n const int exit_code_;\n};\n\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n// Tests that an exit code describes an exit due to termination by a\n// given signal.\n// GOOGLETEST_CM0006 DO NOT DELETE\nclass GTEST_API_ KilledBySignal {\n public:\n explicit KilledBySignal(int signum);\n bool operator()(int exit_status) const;\n private:\n const int signum_;\n};\n# endif // !GTEST_OS_WINDOWS\n\n" + }, + { + "chunk_id": "doc_25_chunk_13", + "original_index": 13, + "content": "// EXPECT_DEBUG_DEATH asserts that the given statements die in debug mode.\n// The death testing framework causes this to have interesting semantics,\n// since the sideeffects of the call are only visible in opt mode, and not\n// in debug mode.\n//\n// In practice, this can be used to test functions that utilize the\n// LOG(DFATAL) macro using the following style:\n//\n// int DieInDebugOr12(int* sideeffect) {\n// if (sideeffect) {\n// *sideeffect = 12;\n// }\n" + }, + { + "chunk_id": "doc_25_chunk_14", + "original_index": 14, + "content": "// LOG(DFATAL) << \"death\";\n// return 12;\n// }\n//\n// TEST(TestSuite, TestDieOr12WorksInDgbAndOpt) {\n// int sideeffect = 0;\n// // Only asserts in dbg.\n// EXPECT_DEBUG_DEATH(DieInDebugOr12(&sideeffect), \"death\");\n//\n// #ifdef NDEBUG\n// // opt-mode has sideeffect visible.\n// EXPECT_EQ(12, sideeffect);\n// #else\n// // dbg-mode no visible sideeffect.\n// EXPECT_EQ(0, sideeffect);\n// #endif\n// }\n//\n// This will assert that DieInDebugReturn12InOpt() crashes in debug\n// mode, usually due to a DCHECK or LOG(DFATAL), but returns the\n// appropriate fallback value (12 in this case) in opt mode. 
If you\n// need to test that a function has appropriate side-effects in opt\n// mode, include assertions against the side-effects. A general\n// pattern for this is:\n//\n// EXPECT_DEBUG_DEATH({\n// // Side-effects here will have an effect after this statement in\n// // opt mode, but none in debug mode.\n// EXPECT_EQ(12, DieInDebugOr12(&sideeffect));\n// }, \"death\");\n//\n# ifdef NDEBUG\n\n" + }, + { + "chunk_id": "doc_25_chunk_15", + "original_index": 15, + "content": "# define EXPECT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# else\n\n# define EXPECT_DEBUG_DEATH(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n\n# endif // NDEBUG for EXPECT_DEBUG_DEATH\n#endif // GTEST_HAS_DEATH_TEST\n\n" + }, + { + "chunk_id": "doc_25_chunk_16", + "original_index": 16, + "content": "// This macro is used for implementing macros such as\n// EXPECT_DEATH_IF_SUPPORTED and ASSERT_DEATH_IF_SUPPORTED on systems where\n// death tests are not supported. Those macros must compile on such systems\n// if and only if EXPECT_DEATH and ASSERT_DEATH compile with the same parameters\n// on systems that support death tests. This allows one to write such a macro on\n// a system that does not support death tests and be sure that it will compile\n// on a death-test supporting system. It is exposed publicly so that systems\n// that have death-tests with stricter requirements than GTEST_HAS_DEATH_TEST\n" + }, + { + "chunk_id": "doc_25_chunk_17", + "original_index": 17, + "content": "// can write their own equivalent of EXPECT_DEATH_IF_SUPPORTED and\n// ASSERT_DEATH_IF_SUPPORTED.\n//\n// Parameters:\n// statement - A statement that a macro such as EXPECT_DEATH would test\n// for program termination. This macro has to make sure this\n// statement is compiled but not executed, to ensure that\n// EXPECT_DEATH_IF_SUPPORTED compiles with a certain\n// parameter if and only if EXPECT_DEATH compiles with it.\n// regex - A regex that a macro such as EXPECT_DEATH would use to test\n" + }, + { + "chunk_id": "doc_25_chunk_18", + "original_index": 18, + "content": "// the output of statement. This parameter has to be\n// compiled but not evaluated by this macro, to ensure that\n// this macro only accepts expressions that a macro such as\n// EXPECT_DEATH would accept.\n// terminator - Must be an empty statement for EXPECT_DEATH_IF_SUPPORTED\n// and a return statement for ASSERT_DEATH_IF_SUPPORTED.\n// This ensures that ASSERT_DEATH_IF_SUPPORTED will not\n// compile inside functions where ASSERT_DEATH doesn't\n// compile.\n//\n// The branch that has an always false condition is used to ensure that\n// statement and regex are compiled (and thus syntactically correct) but\n// never executed. The unreachable code macro protects the terminator\n// statement from generating an 'unreachable code' warning in case\n// statement unconditionally returns or throws. 
The Message constructor at\n" + }, + { + "chunk_id": "doc_25_chunk_19", + "original_index": 19, + "content": "// the end allows the syntax of streaming additional messages into the\n// macro, for compilational compatibility with EXPECT_DEATH/ASSERT_DEATH.\n# define GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, terminator) \\\n GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n if (::testing::internal::AlwaysTrue()) { \\\n GTEST_LOG_(WARNING) \\\n << \"Death tests are not supported on this platform.\\n\" \\\n << \"Statement '\" #statement \"' cannot be verified.\"; \\\n } else if (::testing::internal::AlwaysFalse()) { \\\n ::testing::internal::RE::PartialMatch(\".*\", (regex)); \\\n GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n terminator; \\\n } else \\\n ::testing::Message()\n\n" + }, + { + "chunk_id": "doc_25_chunk_20", + "original_index": 20, + "content": "// EXPECT_DEATH_IF_SUPPORTED(statement, regex) and\n// ASSERT_DEATH_IF_SUPPORTED(statement, regex) expand to real death tests if\n// death tests are supported; otherwise they just issue a warning. This is\n// useful when you are combining death test assertions with normal test\n// assertions in one test.\n#if GTEST_HAS_DEATH_TEST\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n#else\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, )\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, return)\n#endif\n\n} // namespace testing\n\n#endif // GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n" + } + ] + }, + { + "doc_id": "doc_26", + "original_uuid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", + "content": "#pragma once\n\n#include \n#include \n\nnamespace clickhouse {\n\nclass InputStream;\nclass OutputStream;\n\nclass WireFormat {\npublic:\n template \n static bool ReadFixed(InputStream& input, T* value);\n static bool ReadString(InputStream& input, std::string* value);\n static bool SkipString(InputStream& input);\n static bool ReadBytes(InputStream& input, void* buf, size_t len);\n static bool ReadUInt64(InputStream& input, uint64_t* value);\n static bool ReadVarint64(InputStream& output, uint64_t* value);\n\n template \n static void WriteFixed(OutputStream& output, const T& value);\n static void WriteBytes(OutputStream& output, const void* buf, size_t len);\n static void WriteString(OutputStream& output, std::string_view value);\n static void WriteUInt64(OutputStream& output, const uint64_t value);\n static void WriteVarint64(OutputStream& output, uint64_t value);\n\nprivate:\n static bool ReadAll(InputStream& input, void* buf, size_t len);\n static void WriteAll(OutputStream& output, const void* buf, size_t len);\n};\n\ntemplate \ninline bool WireFormat::ReadFixed(InputStream& input, T* value) {\n return ReadAll(input, value, sizeof(T));\n}\n\ninline bool WireFormat::ReadString(InputStream& input, std::string* value) {\n uint64_t len = 0;\n if (ReadVarint64(input, &len)) {\n if (len > 0x00FFFFFFULL) {\n return false;\n }\n value->resize((size_t)len);\n return ReadAll(input, value->data(), (size_t)len);\n }\n\n return false;\n}\n\ninline bool WireFormat::ReadBytes(InputStream& input, void* buf, size_t len) {\n return ReadAll(input, buf, len);\n}\n\ninline bool WireFormat::ReadUInt64(InputStream& input, uint64_t* value) {\n return ReadVarint64(input, value);\n}\n\ntemplate 
\ninline void WireFormat::WriteFixed(OutputStream& output, const T& value) {\n WriteAll(output, &value, sizeof(T));\n}\n\ninline void WireFormat::WriteBytes(OutputStream& output, const void* buf, size_t len) {\n WriteAll(output, buf, len);\n}\n\ninline void WireFormat::WriteString(OutputStream& output, std::string_view value) {\n WriteVarint64(output, value.size());\n WriteAll(output, value.data(), value.size());\n}\n\ninline void WireFormat::WriteUInt64(OutputStream& output, const uint64_t value) {\n WriteVarint64(output, value);\n}\n\n}\n", + "chunks": [ + { + "chunk_id": "doc_26_chunk_0", + "original_index": 0, + "content": "#pragma once\n\n#include \n#include \n\nnamespace clickhouse {\n\nclass InputStream;\nclass OutputStream;\n\nclass WireFormat {\npublic:\n template \n static bool ReadFixed(InputStream& input, T* value);\n static bool ReadString(InputStream& input, std::string* value);\n static bool SkipString(InputStream& input);\n static bool ReadBytes(InputStream& input, void* buf, size_t len);\n static bool ReadUInt64(InputStream& input, uint64_t* value);\n static bool ReadVarint64(InputStream& output, uint64_t* value);\n\n template \n static void WriteFixed(OutputStream& output, const T& value);\n static void WriteBytes(OutputStream& output, const void* buf, size_t len);\n static void WriteString(OutputStream& output, std::string_view value);\n static void WriteUInt64(OutputStream& output, const uint64_t value);\n static void WriteVarint64(OutputStream& output, uint64_t value);\n\n" + }, + { + "chunk_id": "doc_26_chunk_1", + "original_index": 1, + "content": "private:\n static bool ReadAll(InputStream& input, void* buf, size_t len);\n static void WriteAll(OutputStream& output, const void* buf, size_t len);\n};\n\ntemplate \ninline bool WireFormat::ReadFixed(InputStream& input, T* value) {\n return ReadAll(input, value, sizeof(T));\n}\n\ninline bool WireFormat::ReadString(InputStream& input, std::string* value) {\n uint64_t len = 0;\n if (ReadVarint64(input, &len)) {\n if (len > 0x00FFFFFFULL) {\n return false;\n }\n value->resize((size_t)len);\n return ReadAll(input, value->data(), (size_t)len);\n }\n\n" + }, + { + "chunk_id": "doc_26_chunk_2", + "original_index": 2, + "content": " return false;\n}\n\ninline bool WireFormat::ReadBytes(InputStream& input, void* buf, size_t len) {\n return ReadAll(input, buf, len);\n}\n\ninline bool WireFormat::ReadUInt64(InputStream& input, uint64_t* value) {\n return ReadVarint64(input, value);\n}\n\ntemplate \ninline void WireFormat::WriteFixed(OutputStream& output, const T& value) {\n WriteAll(output, &value, sizeof(T));\n}\n\ninline void WireFormat::WriteBytes(OutputStream& output, const void* buf, size_t len) {\n WriteAll(output, buf, len);\n}\n\ninline void WireFormat::WriteString(OutputStream& output, std::string_view value) {\n WriteVarint64(output, value.size());\n WriteAll(output, value.data(), value.size());\n}\n\ninline void WireFormat::WriteUInt64(OutputStream& output, const uint64_t value) {\n WriteVarint64(output, value);\n}\n\n}\n" + } + ] + }, + { + "doc_id": "doc_27", + "original_uuid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", + "content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data 
to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n", + "chunks": [ + { + "chunk_id": "doc_27_chunk_0", + "original_index": 0, + "content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n" + } + ] + }, + { + "doc_id": "doc_28", + "original_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", + "content": "#pragma once\n\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"nullable.h\"\n\n#include \n#include \n#include \n#include \n\nnamespace clickhouse {\n\ntemplate \nclass ColumnLowCardinalityT;\n\nnamespace details {\n\n/** LowCardinalityHashKey used as key in unique items hashmap to abstract away key value\n * (type of which depends on dictionary column) and to reduce likelehood of collisions.\n *\n * In order to dramatically reduce collision rate, we use 2 different hashes from 2 different hash functions.\n * First hash is used in hashtable (to calculate item position).\n * Second one is used as part of key value and accessed via `operator==()` upon collision resolution/detection.\n */\nusing LowCardinalityHashKey = std::pair;\n\nstruct LowCardinalityHashKeyHash {\n inline std::size_t operator()(const LowCardinalityHashKey &hash_key) const noexcept {\n return hash_key.first;\n }\n};\n\n}\n\n/*\n * LC column contains an \"invisible\" default item at the beginning of the collection. [default, ...]\n * If the nested type is Nullable, it contains a null-item at the beginning and a default item at the second position. [null, default, ...]\n * Null map is not serialized in LC columns. 
Instead, nulls are tracked by having an index of 0.\n * */\nclass ColumnLowCardinality : public Column {\npublic:\n using UniqueItems = std::unordered_map;\n\n template \n friend class ColumnLowCardinalityT;\n\nprivate:\n // IMPLEMENTATION NOTE: ColumnLowCardinalityT takes reference to underlying dictionary column object,\n // so make sure to NOT change address of the dictionary object (with reset(), swap()) or with anything else.\n ColumnRef dictionary_column_;\n ColumnRef index_column_;\n UniqueItems unique_items_map_;\n\npublic:\n ColumnLowCardinality(ColumnLowCardinality&& col) = default;\n // c-tor makes a deep copy of the dictionary_column.\n explicit ColumnLowCardinality(ColumnRef dictionary_column);\n explicit ColumnLowCardinality(std::shared_ptr dictionary_column);\n\n template \n explicit ColumnLowCardinality(std::shared_ptr> dictionary_column)\n : ColumnLowCardinality(dictionary_column->template As())\n {}\n\n ~ColumnLowCardinality();\n\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends another LowCardinality column to the end of this one, updating dictionary.\n void Append(ColumnRef /*column*/) override;\n\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data.\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of current column, with compacted dictionary\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n ItemView GetItem(size_t index) const override;\n\n size_t GetDictionarySize() const;\n TypeRef GetNestedType() const;\n\nprotected:\n std::uint64_t getDictionaryIndex(std::uint64_t item_index) const;\n void appendIndex(std::uint64_t item_index);\n void removeLastIndex();\n ColumnRef GetDictionary();\n\n void AppendUnsafe(const ItemView &);\n\nprivate:\n void Setup(ColumnRef dictionary_column);\n void AppendNullItem();\n void AppendDefaultItem();\n\npublic:\n static details::LowCardinalityHashKey computeHashKey(const ItemView &);\n};\n\n/** Type-aware wrapper that provides simple convenience interface for accessing/appending individual items.\n */\ntemplate \nclass ColumnLowCardinalityT : public ColumnLowCardinality {\n\n DictionaryColumnType& typed_dictionary_;\n const Type::Code type_;\n\npublic:\n using WrappedColumnType = DictionaryColumnType;\n // Type this column takes as argument of Append and returns with At() and operator[]\n using ValueType = typename DictionaryColumnType::ValueType;\n\n explicit ColumnLowCardinalityT(ColumnLowCardinality&& col)\n : ColumnLowCardinality(std::move(col))\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {\n }\n\n template \n explicit ColumnLowCardinalityT(Args &&... 
args)\n : ColumnLowCardinalityT(std::make_shared(std::forward(args)...))\n {}\n\n // Create LC column from existing T-column, making a deep copy of all contents.\n explicit ColumnLowCardinalityT(std::shared_ptr dictionary_col)\n : ColumnLowCardinality(dictionary_col)\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {}\n\n /// Extended interface to simplify reading/adding individual items.\n\n /// Returns element at given row number.\n inline ValueType At(size_t n) const {\n return typed_dictionary_.At(getDictionaryIndex(n));\n }\n\n /// Returns element at given row number.\n inline ValueType operator [] (size_t n) const {\n return typed_dictionary_[getDictionaryIndex(n)];\n }\n\n // so the non-virtual Append below doesn't shadow Append() from base class when compiled with older compilers.\n using ColumnLowCardinality::Append;\n\n inline void Append(const ValueType & value) {\n if constexpr (IsNullable) {\n if (value.has_value()) {\n AppendUnsafe(ItemView{type_, *value});\n } else {\n AppendUnsafe(ItemView{});\n }\n } else {\n AppendUnsafe(ItemView{type_, value});\n }\n }\n\n template \n inline void AppendMany(const T& container) {\n for (const auto & item : container) {\n Append(item);\n }\n }\n\n /** Create a ColumnLowCardinalityT from a ColumnLowCardinality, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnLowCardinality&& col) {\n return std::make_shared>(std::move(col));\n }\n\n static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnLowCardinality::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnLowCardinality::CloneEmpty()); }\n\nprivate:\n\n template \n static auto GetTypeCode(T& column) {\n if constexpr (IsNullable) {\n return GetTypeCode(*column.Nested()->template AsStrict());\n } else {\n return column.Type()->GetCode();\n }\n }\n};\n\n}\n", + "chunks": [ + { + "chunk_id": "doc_28_chunk_0", + "original_index": 0, + "content": "#pragma once\n\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"nullable.h\"\n\n#include \n#include \n#include \n#include \n\nnamespace clickhouse {\n\ntemplate \nclass ColumnLowCardinalityT;\n\nnamespace details {\n\n/** LowCardinalityHashKey used as key in unique items hashmap to abstract away key value\n * (type of which depends on dictionary column) and to reduce likelehood of collisions.\n *\n * In order to dramatically reduce collision rate, we use 2 different hashes from 2 different hash functions.\n * First hash is used in hashtable (to calculate item position).\n * Second one is used as part of key value and accessed via `operator==()` upon collision resolution/detection.\n */\nusing LowCardinalityHashKey = std::pair;\n\n" + }, + { + "chunk_id": "doc_28_chunk_1", + "original_index": 1, + "content": "struct LowCardinalityHashKeyHash {\n inline std::size_t operator()(const LowCardinalityHashKey &hash_key) const noexcept {\n return 
hash_key.first;\n }\n};\n\n}\n\n/*\n * LC column contains an \"invisible\" default item at the beginning of the collection. [default, ...]\n * If the nested type is Nullable, it contains a null-item at the beginning and a default item at the second position. [null, default, ...]\n * Null map is not serialized in LC columns. Instead, nulls are tracked by having an index of 0.\n * */\nclass ColumnLowCardinality : public Column {\npublic:\n using UniqueItems = std::unordered_map;\n\n" + }, + { + "chunk_id": "doc_28_chunk_2", + "original_index": 2, + "content": " template \n friend class ColumnLowCardinalityT;\n\nprivate:\n // IMPLEMENTATION NOTE: ColumnLowCardinalityT takes reference to underlying dictionary column object,\n // so make sure to NOT change address of the dictionary object (with reset(), swap()) or with anything else.\n ColumnRef dictionary_column_;\n ColumnRef index_column_;\n UniqueItems unique_items_map_;\n\npublic:\n ColumnLowCardinality(ColumnLowCardinality&& col) = default;\n // c-tor makes a deep copy of the dictionary_column.\n explicit ColumnLowCardinality(ColumnRef dictionary_column);\n explicit ColumnLowCardinality(std::shared_ptr dictionary_column);\n\n template \n explicit ColumnLowCardinality(std::shared_ptr> dictionary_column)\n : ColumnLowCardinality(dictionary_column->template As())\n {}\n\n" + }, + { + "chunk_id": "doc_28_chunk_3", + "original_index": 3, + "content": " ~ColumnLowCardinality();\n\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends another LowCardinality column to the end of this one, updating dictionary.\n void Append(ColumnRef /*column*/) override;\n\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data.\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n" + }, + { + "chunk_id": "doc_28_chunk_4", + "original_index": 4, + "content": " /// Makes slice of current column, with compacted dictionary\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n ItemView GetItem(size_t index) const override;\n\n size_t GetDictionarySize() const;\n TypeRef GetNestedType() const;\n\nprotected:\n std::uint64_t getDictionaryIndex(std::uint64_t item_index) const;\n void appendIndex(std::uint64_t item_index);\n void removeLastIndex();\n ColumnRef GetDictionary();\n\n void AppendUnsafe(const ItemView &);\n\nprivate:\n void Setup(ColumnRef dictionary_column);\n void AppendNullItem();\n void AppendDefaultItem();\n\npublic:\n static details::LowCardinalityHashKey computeHashKey(const ItemView &);\n};\n\n/** Type-aware wrapper that provides simple convenience interface for accessing/appending individual items.\n */\ntemplate \nclass ColumnLowCardinalityT : public ColumnLowCardinality {\n\n" + }, + { + "chunk_id": "doc_28_chunk_5", + "original_index": 5, + "content": " DictionaryColumnType& typed_dictionary_;\n const Type::Code type_;\n\npublic:\n using WrappedColumnType = DictionaryColumnType;\n // Type this column takes as argument of Append and returns with At() and operator[]\n using ValueType = typename 
DictionaryColumnType::ValueType;\n\n explicit ColumnLowCardinalityT(ColumnLowCardinality&& col)\n : ColumnLowCardinality(std::move(col))\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {\n }\n\n template \n explicit ColumnLowCardinalityT(Args &&... args)\n : ColumnLowCardinalityT(std::make_shared(std::forward(args)...))\n {}\n\n" + }, + { + "chunk_id": "doc_28_chunk_6", + "original_index": 6, + "content": " // Create LC column from existing T-column, making a deep copy of all contents.\n explicit ColumnLowCardinalityT(std::shared_ptr dictionary_col)\n : ColumnLowCardinality(dictionary_col)\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {}\n\n /// Extended interface to simplify reading/adding individual items.\n\n /// Returns element at given row number.\n inline ValueType At(size_t n) const {\n return typed_dictionary_.At(getDictionaryIndex(n));\n }\n\n /// Returns element at given row number.\n inline ValueType operator [] (size_t n) const {\n return typed_dictionary_[getDictionaryIndex(n)];\n }\n\n // so the non-virtual Append below doesn't shadow Append() from base class when compiled with older compilers.\n using ColumnLowCardinality::Append;\n\n" + }, + { + "chunk_id": "doc_28_chunk_7", + "original_index": 7, + "content": " inline void Append(const ValueType & value) {\n if constexpr (IsNullable) {\n if (value.has_value()) {\n AppendUnsafe(ItemView{type_, *value});\n } else {\n AppendUnsafe(ItemView{});\n }\n } else {\n AppendUnsafe(ItemView{type_, value});\n }\n }\n\n template \n inline void AppendMany(const T& container) {\n for (const auto & item : container) {\n Append(item);\n }\n }\n\n" + }, + { + "chunk_id": "doc_28_chunk_8", + "original_index": 8, + "content": " /** Create a ColumnLowCardinalityT from a ColumnLowCardinality, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnLowCardinality&& col) {\n return std::make_shared>(std::move(col));\n }\n\n" + }, + { + "chunk_id": "doc_28_chunk_9", + "original_index": 9, + "content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnLowCardinality::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnLowCardinality::CloneEmpty()); }\n\nprivate:\n\n template \n static auto GetTypeCode(T& column) {\n if constexpr (IsNullable) {\n return GetTypeCode(*column.Nested()->template AsStrict());\n } else {\n return column.Type()->GetCode();\n }\n }\n};\n\n}\n" + } + ] + }, + { + "doc_id": "doc_29", + "original_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", + "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate \nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef 
data);\n\n /// Appends one element to the end of column.\n template \n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", + "chunks": [ + { + "chunk_id": "doc_29_chunk_0", + "original_index": 0, + "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate \nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template \n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\n" + }, + { + "chunk_id": "doc_29_chunk_1", + "original_index": 1, + "content": "public:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n" + }, + { + "chunk_id": "doc_29_chunk_2", + "original_index": 2, + "content": " /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing 
ColumnMultiPolygon = ColumnGeo, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n" + } + ] + }, + { + "doc_id": "doc_30", + "original_uuid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd", + "content": "#pragma once\n\n#include \n#include \n#include \n\nnamespace clickhouse {\n\ntemplate ()(std::declval())),\n typename Value = std::decay_t>\nclass ProjectedIterator {\npublic:\n using value_type = Value;\n using reference = Reference;\n using pointer = Reference;\n using difference_type = typename std::iterator_traits::difference_type;\n using iterator_category = typename std::iterator_traits::iterator_category;\n\n ProjectedIterator() = default;\n\n inline ProjectedIterator(Iterator const& iterator, UnaryFunction functor)\n : iterator_(iterator)\n , functor_(std::move(functor)) {\n }\n\n inline UnaryFunction functor() const { return functor; }\n\n inline Iterator const& base() const { return iterator_; }\n\n inline reference operator*() const { return functor_(iterator_); }\n\n inline ProjectedIterator& operator++() {\n ++iterator_;\n return *this;\n }\n\n inline ProjectedIterator& operator--() {\n --iterator_;\n return *this;\n }\n\n inline bool operator==(const ProjectedIterator& other) const {\n return this->iterator_ == other.iterator_;\n }\n\n inline bool operator!=(const ProjectedIterator& other) const {\n return !(*this == other);\n }\n\nprivate:\n Iterator iterator_;\n UnaryFunction functor_;\n};\n\n} // namespace clickhouse\n", + "chunks": [ + { + "chunk_id": "doc_30_chunk_0", + "original_index": 0, + "content": "#pragma once\n\n#include \n#include \n#include \n\nnamespace clickhouse {\n\ntemplate ()(std::declval())),\n typename Value = std::decay_t>\nclass ProjectedIterator {\npublic:\n using value_type = Value;\n using reference = Reference;\n using pointer = Reference;\n using difference_type = typename std::iterator_traits::difference_type;\n using iterator_category = typename std::iterator_traits::iterator_category;\n\n" + }, + { + "chunk_id": "doc_30_chunk_1", + "original_index": 1, + "content": " ProjectedIterator() = default;\n\n inline ProjectedIterator(Iterator const& iterator, UnaryFunction functor)\n : iterator_(iterator)\n , functor_(std::move(functor)) {\n }\n\n inline UnaryFunction functor() const { return functor; }\n\n inline Iterator const& base() const { return iterator_; }\n\n inline reference operator*() const { return functor_(iterator_); }\n\n inline ProjectedIterator& operator++() {\n ++iterator_;\n return *this;\n }\n\n inline ProjectedIterator& operator--() {\n --iterator_;\n return *this;\n }\n\n inline bool operator==(const ProjectedIterator& other) const {\n return this->iterator_ == other.iterator_;\n }\n\n inline bool operator!=(const ProjectedIterator& other) const {\n return !(*this == other);\n }\n\nprivate:\n Iterator iterator_;\n UnaryFunction functor_;\n};\n\n} // namespace clickhouse\n" + } + ] + }, + { + "doc_id": "doc_31", + "original_uuid": "4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", + "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n", + "chunks": [ + { + "chunk_id": "doc_31_chunk_0", + "original_index": 0, + "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n" + } + ] + }, + { + "doc_id": "doc_32", + "original_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", + "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\nimport com.fasterxml.jackson.annotation.JsonIgnoreProperties;\nimport com.google.cloud.tools.jib.api.LogEvent;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.http.Request;\nimport com.google.cloud.tools.jib.http.Response;\nimport com.google.cloud.tools.jib.json.JsonTemplate;\nimport com.google.cloud.tools.jib.json.JsonTemplateMapper;\nimport com.google.cloud.tools.jib.plugins.common.globalconfig.GlobalConfig;\nimport com.google.common.annotations.VisibleForTesting;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardCopyOption;\nimport java.time.Duration;\nimport java.time.Instant;\nimport java.time.format.DateTimeParseException;\nimport java.util.Optional;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Future;\nimport java.util.function.Consumer;\n\n/** Checks if Jib is up-to-date. */\npublic class UpdateChecker {\n\n private static final String LAST_UPDATE_CHECK_FILENAME = \"lastUpdateCheck\";\n\n /** JSON template for content downloaded during version check. 
*/\n @JsonIgnoreProperties(ignoreUnknown = true)\n private static class VersionJsonTemplate implements JsonTemplate {\n private String latest = \"\";\n }\n\n /**\n * Begins checking for an update in a separate thread.\n *\n * @param executorService the {@link ExecutorService}\n * @param versionUrl the location to check for the latest version\n * @param toolName the tool name\n * @param toolVersion the tool version\n * @param log {@link Consumer} used to log messages\n * @return a new {@link UpdateChecker}\n */\n public static Future> checkForUpdate(\n ExecutorService executorService,\n String versionUrl,\n String toolName,\n String toolVersion,\n Consumer log) {\n return executorService.submit(\n () ->\n performUpdateCheck(\n GlobalConfig.getConfigDir(), toolVersion, versionUrl, toolName, log));\n }\n\n @VisibleForTesting\n static Optional performUpdateCheck(\n Path configDir,\n String currentVersion,\n String versionUrl,\n String toolName,\n Consumer log) {\n Path lastUpdateCheck = configDir.resolve(LAST_UPDATE_CHECK_FILENAME);\n\n try {\n // Check time of last update check\n if (Files.exists(lastUpdateCheck)) {\n try {\n String fileContents =\n new String(Files.readAllBytes(lastUpdateCheck), StandardCharsets.UTF_8);\n Instant modifiedTime = Instant.parse(fileContents);\n if (modifiedTime.plus(Duration.ofDays(1)).isAfter(Instant.now())) {\n return Optional.empty();\n }\n } catch (DateTimeParseException | IOException ex) {\n // If reading update time failed, file might be corrupt, so delete it\n log.accept(LogEvent.debug(\"Failed to read lastUpdateCheck; \" + ex.getMessage()));\n Files.delete(lastUpdateCheck);\n }\n }\n\n // Check for update\n FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n try {\n Response response =\n httpClient.get(\n new URL(versionUrl),\n Request.builder()\n .setHttpTimeout(3000)\n .setUserAgent(\"jib \" + currentVersion + \" \" + toolName)\n .build());\n VersionJsonTemplate version =\n JsonTemplateMapper.readJson(response.getBody(), VersionJsonTemplate.class);\n\n Path lastUpdateCheckTemp =\n Files.createTempFile(configDir, LAST_UPDATE_CHECK_FILENAME, null);\n lastUpdateCheckTemp.toFile().deleteOnExit();\n Files.write(lastUpdateCheckTemp, Instant.now().toString().getBytes(StandardCharsets.UTF_8));\n Files.move(lastUpdateCheckTemp, lastUpdateCheck, StandardCopyOption.REPLACE_EXISTING);\n\n if (currentVersion.equals(version.latest)) {\n return Optional.empty();\n }\n return Optional.of(version.latest);\n } finally {\n httpClient.shutDown();\n }\n\n } catch (IOException ex) {\n log.accept(LogEvent.debug(\"Update check failed; \" + ex.getMessage()));\n }\n\n return Optional.empty();\n }\n\n /**\n * Returns the latest Jib version available if the check succeeded and the current version is\n * outdated, or returns {@code Optional.empty()} if the check was interrupted or did not determine\n * that a later version was available.\n *\n * @param updateMessageFuture the {@link Future} returned by {@link UpdateChecker#checkForUpdate}\n * @return the latest version, if found, else {@code Optional.empty()}.\n */\n public static Optional finishUpdateCheck(Future> updateMessageFuture) {\n if (updateMessageFuture.isDone()) {\n try {\n return updateMessageFuture.get();\n } catch (InterruptedException | ExecutionException ex) {\n // No need to restore the interrupted status. 
The intention here is to silently consume any\n // kind of error\n }\n }\n updateMessageFuture.cancel(true);\n return Optional.empty();\n }\n\n private UpdateChecker() {}\n}\n", + "chunks": [ + { + "chunk_id": "doc_32_chunk_0", + "original_index": 0, + "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\n" + }, + { + "chunk_id": "doc_32_chunk_1", + "original_index": 1, + "content": "import com.fasterxml.jackson.annotation.JsonIgnoreProperties;\nimport com.google.cloud.tools.jib.api.LogEvent;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.http.Request;\nimport com.google.cloud.tools.jib.http.Response;\nimport com.google.cloud.tools.jib.json.JsonTemplate;\nimport com.google.cloud.tools.jib.json.JsonTemplateMapper;\nimport com.google.cloud.tools.jib.plugins.common.globalconfig.GlobalConfig;\nimport com.google.common.annotations.VisibleForTesting;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardCopyOption;\nimport java.time.Duration;\nimport java.time.Instant;\nimport java.time.format.DateTimeParseException;\nimport java.util.Optional;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Future;\nimport java.util.function.Consumer;\n\n" + }, + { + "chunk_id": "doc_32_chunk_2", + "original_index": 2, + "content": "/** Checks if Jib is up-to-date. */\npublic class UpdateChecker {\n\n private static final String LAST_UPDATE_CHECK_FILENAME = \"lastUpdateCheck\";\n\n /** JSON template for content downloaded during version check. 
*/\n @JsonIgnoreProperties(ignoreUnknown = true)\n private static class VersionJsonTemplate implements JsonTemplate {\n private String latest = \"\";\n }\n\n" + }, + { + "chunk_id": "doc_32_chunk_3", + "original_index": 3, + "content": " /**\n * Begins checking for an update in a separate thread.\n *\n * @param executorService the {@link ExecutorService}\n * @param versionUrl the location to check for the latest version\n * @param toolName the tool name\n * @param toolVersion the tool version\n * @param log {@link Consumer} used to log messages\n * @return a new {@link UpdateChecker}\n */\n public static Future> checkForUpdate(\n ExecutorService executorService,\n String versionUrl,\n String toolName,\n String toolVersion,\n Consumer log) {\n return executorService.submit(\n () ->\n performUpdateCheck(\n GlobalConfig.getConfigDir(), toolVersion, versionUrl, toolName, log));\n }\n\n" + }, + { + "chunk_id": "doc_32_chunk_4", + "original_index": 4, + "content": " @VisibleForTesting\n static Optional performUpdateCheck(\n Path configDir,\n String currentVersion,\n String versionUrl,\n String toolName,\n Consumer log) {\n Path lastUpdateCheck = configDir.resolve(LAST_UPDATE_CHECK_FILENAME);\n\n try {\n // Check time of last update check\n if (Files.exists(lastUpdateCheck)) {\n try {\n String fileContents =\n new String(Files.readAllBytes(lastUpdateCheck), StandardCharsets.UTF_8);\n Instant modifiedTime = Instant.parse(fileContents);\n if (modifiedTime.plus(Duration.ofDays(1)).isAfter(Instant.now())) {\n return Optional.empty();\n }\n } catch (DateTimeParseException | IOException ex) {\n // If reading update time failed, file might be corrupt, so delete it\n log.accept(LogEvent.debug(\"Failed to read lastUpdateCheck; \" + ex.getMessage()));\n Files.delete(lastUpdateCheck);\n }\n }\n\n" + }, + { + "chunk_id": "doc_32_chunk_5", + "original_index": 5, + "content": " // Check for update\n FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n try {\n Response response =\n httpClient.get(\n new URL(versionUrl),\n Request.builder()\n .setHttpTimeout(3000)\n .setUserAgent(\"jib \" + currentVersion + \" \" + toolName)\n .build());\n VersionJsonTemplate version =\n JsonTemplateMapper.readJson(response.getBody(), VersionJsonTemplate.class);\n\n" + }, + { + "chunk_id": "doc_32_chunk_6", + "original_index": 6, + "content": " Path lastUpdateCheckTemp =\n Files.createTempFile(configDir, LAST_UPDATE_CHECK_FILENAME, null);\n lastUpdateCheckTemp.toFile().deleteOnExit();\n Files.write(lastUpdateCheckTemp, Instant.now().toString().getBytes(StandardCharsets.UTF_8));\n Files.move(lastUpdateCheckTemp, lastUpdateCheck, StandardCopyOption.REPLACE_EXISTING);\n\n if (currentVersion.equals(version.latest)) {\n return Optional.empty();\n }\n return Optional.of(version.latest);\n } finally {\n httpClient.shutDown();\n }\n\n } catch (IOException ex) {\n log.accept(LogEvent.debug(\"Update check failed; \" + ex.getMessage()));\n }\n\n return Optional.empty();\n }\n\n" + }, + { + "chunk_id": "doc_32_chunk_7", + "original_index": 7, + "content": " /**\n * Returns the latest Jib version available if the check succeeded and the current version is\n * outdated, or returns {@code Optional.empty()} if the check was interrupted or did not determine\n * that a later version was available.\n *\n * @param updateMessageFuture the {@link Future} returned by {@link UpdateChecker#checkForUpdate}\n * @return the latest version, if found, else {@code Optional.empty()}.\n */\n public static Optional 
finishUpdateCheck(Future> updateMessageFuture) {\n if (updateMessageFuture.isDone()) {\n try {\n return updateMessageFuture.get();\n } catch (InterruptedException | ExecutionException ex) {\n // No need to restore the interrupted status. The intention here is to silently consume any\n // kind of error\n }\n }\n updateMessageFuture.cancel(true);\n return Optional.empty();\n }\n\n private UpdateChecker() {}\n}\n" + } + ] + }, + { + "doc_id": "doc_33", + "original_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", + "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\nimport static com.google.common.truth.Truth.assertThat;\nimport static com.google.common.truth.Truth8.assertThat;\nimport static org.junit.Assert.assertThrows;\nimport static org.mockito.ArgumentMatchers.anyString;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport com.google.cloud.tools.jib.api.Credential;\nimport com.google.cloud.tools.jib.api.CredentialRetriever;\nimport com.google.cloud.tools.jib.frontend.CredentialRetrieverFactory;\nimport com.google.common.collect.ImmutableMap;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n/** Tests for {@link DefaultCredentialRetrievers}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class DefaultCredentialRetrieversTest {\n\n @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Mock private CredentialRetrieverFactory mockCredentialRetrieverFactory;\n @Mock private CredentialRetriever mockDockerCredentialHelperCredentialRetriever;\n @Mock private CredentialRetriever mockKnownCredentialRetriever;\n @Mock private CredentialRetriever mockInferredCredentialRetriever;\n @Mock private CredentialRetriever mockWellKnownCredentialHelpersCredentialRetriever;\n @Mock private CredentialRetriever mockXdgPrimaryCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockApplicationDefaultCredentialRetriever;\n\n private Properties properties;\n private Map environment;\n\n private final Credential knownCredential = Credential.from(\"username\", \"password\");\n private final Credential inferredCredential = Credential.from(\"username2\", \"password2\");\n\n @Before\n public void setUp() {\n properties = new Properties();\n properties.setProperty(\"os.name\", \"unknown\");\n properties.setProperty(\"user.home\", Paths.get(\"/system/home\").toString());\n environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/docker_config\").toString(),\n \"XDG_RUNTIME_DIR\",\n Paths.get(\"/run/user/1000\").toString(),\n \"XDG_CONFIG_HOME\",\n Paths.get(\"/env/home/.config\").toString());\n\n when(mockCredentialRetrieverFactory.dockerCredentialHelper(anyString()))\n .thenReturn(mockDockerCredentialHelperCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(knownCredential, \"credentialSource\"))\n .thenReturn(mockKnownCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(inferredCredential, \"inferredCredentialSource\"))\n .thenReturn(mockInferredCredentialRetriever);\n when(mockCredentialRetrieverFactory.wellKnownCredentialHelpers())\n .thenReturn(mockWellKnownCredentialHelpersCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/run/user/1000/containers/auth.json\")))\n .thenReturn(mockXdgPrimaryCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.config/containers/auth.json\")))\n .thenReturn(mockEnvHomeXdgCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.config/containers/auth.json\")))\n .thenReturn(mockSystemHomeXdgCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/config.json\")))\n 
.thenReturn(mockDockerConfigEnvDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/.dockerconfigjson\")))\n .thenReturn(mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(Paths.get(\"/docker_config/.dockercfg\")))\n .thenReturn(mockDockerConfigEnvLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/system/home/.docker/config.json\")))\n .thenReturn(mockSystemHomeDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockSystemHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/system/home/.docker/.dockercfg\")))\n .thenReturn(mockSystemHomeLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/env/home/.docker/config.json\")))\n .thenReturn(mockEnvHomeDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockEnvHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/env/home/.docker/.dockercfg\")))\n .thenReturn(mockEnvHomeLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.googleApplicationDefaultCredentials())\n .thenReturn(mockApplicationDefaultCredentialRetriever);\n }\n\n @Test\n public void testAsList() throws FileNotFoundException {\n List<CredentialRetriever> retriever =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retriever)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testAsList_all() throws FileNotFoundException {\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setKnownCredential(knownCredential, \"credentialSource\")\n .setInferredCredential(inferredCredential, \"inferredCredentialSource\")\n .setCredentialHelper(\"credentialHelperSuffix\")\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockKnownCredentialRetriever,\n mockDockerCredentialHelperCredentialRetriever,\n mockInferredCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n 
mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n verify(mockCredentialRetrieverFactory).known(knownCredential, \"credentialSource\");\n verify(mockCredentialRetrieverFactory).known(inferredCredential, \"inferredCredentialSource\");\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(\"docker-credential-credentialHelperSuffix\");\n }\n\n @Test\n public void testAsList_credentialHelperPath() throws IOException {\n Path fakeCredentialHelperPath = temporaryFolder.newFile(\"fake-credHelper\").toPath();\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(fakeCredentialHelperPath.toString());\n\n List<CredentialRetriever> retrievers = credentialRetrievers.asList();\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(fakeCredentialHelperPath.toString());\n\n Files.delete(fakeCredentialHelperPath);\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex)\n .hasMessageThat()\n .isEqualTo(\"Specified credential helper was not found: \" + fakeCredentialHelperPath);\n }\n\n @Test\n public void testDockerConfigRetrievers_undefinedHome() throws FileNotFoundException {\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(\n mockCredentialRetrieverFactory, new Properties(), new HashMap<>())\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testDockerConfigRetrievers_noDuplicateRetrievers() throws FileNotFoundException {\n properties.setProperty(\"user.home\", Paths.get(\"/env/home\").toString());\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n 
\"DOCKER_CONFIG\",\n Paths.get(\"/env/home/.docker\").toString());\n retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockEnvHomeXdgCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testCredentialHelper_cmdExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.cmd\").toPath();\n Path pathWithoutCmd = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutCmd.getParent().resolve(\"foo.cmd\"));\n\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString())\n .asList();\n\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testCredentialHelper_exeExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.exe\").toPath();\n Path pathWithoutExe = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutExe.getParent().resolve(\"foo.exe\"));\n\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString())\n .asList();\n\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n 
mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_33_chunk_0", + "original_index": 0, + "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n" + }, + { + "chunk_id": "doc_33_chunk_1", + "original_index": 1, + "content": "package com.google.cloud.tools.jib.plugins.common;\n\nimport static com.google.common.truth.Truth.assertThat;\nimport static com.google.common.truth.Truth8.assertThat;\nimport static org.junit.Assert.assertThrows;\nimport static org.mockito.ArgumentMatchers.anyString;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\n" + }, + { + "chunk_id": "doc_33_chunk_2", + "original_index": 2, + "content": "import com.google.cloud.tools.jib.api.Credential;\nimport com.google.cloud.tools.jib.api.CredentialRetriever;\nimport com.google.cloud.tools.jib.frontend.CredentialRetrieverFactory;\nimport com.google.common.collect.ImmutableMap;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n/** Tests for {@link DefaultCredentialRetrievers}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class DefaultCredentialRetrieversTest {\n\n @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n" + }, + { + "chunk_id": "doc_33_chunk_3", + "original_index": 3, + "content": " @Mock private CredentialRetrieverFactory mockCredentialRetrieverFactory;\n @Mock private CredentialRetriever mockDockerCredentialHelperCredentialRetriever;\n @Mock private CredentialRetriever mockKnownCredentialRetriever;\n @Mock private CredentialRetriever mockInferredCredentialRetriever;\n @Mock private CredentialRetriever mockWellKnownCredentialHelpersCredentialRetriever;\n @Mock private CredentialRetriever mockXdgPrimaryCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvLegacyDockerConfigCredentialRetriever;\n" + }, + { + "chunk_id": "doc_33_chunk_4", + "original_index": 4, + "content": " @Mock private CredentialRetriever mockSystemHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockApplicationDefaultCredentialRetriever;\n\n" + }, + { + "chunk_id": "doc_33_chunk_5", + "original_index": 5, + "content": " private Properties properties;\n private Map<String, String> environment;\n\n private final Credential knownCredential = Credential.from(\"username\", \"password\");\n private final Credential inferredCredential = Credential.from(\"username2\", \"password2\");\n\n @Before\n public void setUp() {\n properties = new Properties();\n properties.setProperty(\"os.name\", \"unknown\");\n properties.setProperty(\"user.home\", Paths.get(\"/system/home\").toString());\n environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/docker_config\").toString(),\n \"XDG_RUNTIME_DIR\",\n Paths.get(\"/run/user/1000\").toString(),\n \"XDG_CONFIG_HOME\",\n Paths.get(\"/env/home/.config\").toString());\n\n" + }, + { + "chunk_id": "doc_33_chunk_6", + "original_index": 6, + "content": " when(mockCredentialRetrieverFactory.dockerCredentialHelper(anyString()))\n .thenReturn(mockDockerCredentialHelperCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(knownCredential, \"credentialSource\"))\n .thenReturn(mockKnownCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(inferredCredential, \"inferredCredentialSource\"))\n .thenReturn(mockInferredCredentialRetriever);\n when(mockCredentialRetrieverFactory.wellKnownCredentialHelpers())\n .thenReturn(mockWellKnownCredentialHelpersCredentialRetriever);\n\n" + }, + { + "chunk_id": "doc_33_chunk_7", + "original_index": 7, + "content": " when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/run/user/1000/containers/auth.json\")))\n .thenReturn(mockXdgPrimaryCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n 
Paths.get(\"/env/home/.config/containers/auth.json\")))\n .thenReturn(mockEnvHomeXdgCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.config/containers/auth.json\")))\n .thenReturn(mockSystemHomeXdgCredentialRetriever);\n\n" + }, + { + "chunk_id": "doc_33_chunk_8", + "original_index": 8, + "content": " when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/config.json\")))\n .thenReturn(mockDockerConfigEnvDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/.dockerconfigjson\")))\n .thenReturn(mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(Paths.get(\"/docker_config/.dockercfg\")))\n .thenReturn(mockDockerConfigEnvLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/system/home/.docker/config.json\")))\n .thenReturn(mockSystemHomeDockerConfigCredentialRetriever);\n" + }, + { + "chunk_id": "doc_33_chunk_9", + "original_index": 9, + "content": " when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockSystemHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/system/home/.docker/.dockercfg\")))\n .thenReturn(mockSystemHomeLegacyDockerConfigCredentialRetriever);\n" + }, + { + "chunk_id": "doc_33_chunk_10", + "original_index": 10, + "content": " when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/env/home/.docker/config.json\")))\n .thenReturn(mockEnvHomeDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockEnvHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/env/home/.docker/.dockercfg\")))\n .thenReturn(mockEnvHomeLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.googleApplicationDefaultCredentials())\n .thenReturn(mockApplicationDefaultCredentialRetriever);\n }\n\n" + }, + { + "chunk_id": "doc_33_chunk_11", + "original_index": 11, + "content": " @Test\n public void testAsList() throws FileNotFoundException {\n List<CredentialRetriever> retriever =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retriever)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n" + }, + { + "chunk_id": "doc_33_chunk_12", + "original_index": 12, + "content": " mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n" + }, + { + "chunk_id": "doc_33_chunk_13", + "original_index": 13, + "content": " @Test\n public void testAsList_all() throws FileNotFoundException {\n List<CredentialRetriever> retrievers =\n new 
DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setKnownCredential(knownCredential, \"credentialSource\")\n .setInferredCredential(inferredCredential, \"inferredCredentialSource\")\n .setCredentialHelper(\"credentialHelperSuffix\")\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockKnownCredentialRetriever,\n mockDockerCredentialHelperCredentialRetriever,\n mockInferredCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n" + }, + { + "chunk_id": "doc_33_chunk_14", + "original_index": 14, + "content": " mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n" + }, + { + "chunk_id": "doc_33_chunk_15", + "original_index": 15, + "content": " verify(mockCredentialRetrieverFactory).known(knownCredential, \"credentialSource\");\n verify(mockCredentialRetrieverFactory).known(inferredCredential, \"inferredCredentialSource\");\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(\"docker-credential-credentialHelperSuffix\");\n }\n\n" + }, + { + "chunk_id": "doc_33_chunk_16", + "original_index": 16, + "content": " @Test\n public void testAsList_credentialHelperPath() throws IOException {\n Path fakeCredentialHelperPath = temporaryFolder.newFile(\"fake-credHelper\").toPath();\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(fakeCredentialHelperPath.toString());\n\n" + }, + { + "chunk_id": "doc_33_chunk_17", + "original_index": 17, + "content": " List<CredentialRetriever> retrievers = credentialRetrievers.asList();\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n" + }, + { + "chunk_id": "doc_33_chunk_18", + "original_index": 18, + "content": " mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(fakeCredentialHelperPath.toString());\n\n" + }, + { + "chunk_id": "doc_33_chunk_19", + "original_index": 19, + "content": " Files.delete(fakeCredentialHelperPath);\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex)\n .hasMessageThat()\n .isEqualTo(\"Specified credential helper was not found: \" + 
fakeCredentialHelperPath);\n }\n\n @Test\n public void testDockerConfigRetrievers_undefinedHome() throws FileNotFoundException {\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(\n mockCredentialRetrieverFactory, new Properties(), new HashMap<>())\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n" + }, + { + "chunk_id": "doc_33_chunk_20", + "original_index": 20, + "content": " @Test\n public void testDockerConfigRetrievers_noDuplicateRetrievers() throws FileNotFoundException {\n properties.setProperty(\"user.home\", Paths.get(\"/env/home\").toString());\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n" + }, + { + "chunk_id": "doc_33_chunk_21", + "original_index": 21, + "content": " mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n" + }, + { + "chunk_id": "doc_33_chunk_22", + "original_index": 22, + "content": " environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/env/home/.docker\").toString());\n retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockEnvHomeXdgCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n" + }, + { + "chunk_id": "doc_33_chunk_23", + "original_index": 23, + "content": " @Test\n public void testCredentialHelper_cmdExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.cmd\").toPath();\n Path pathWithoutCmd = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutCmd.getParent().resolve(\"foo.cmd\"));\n\n" + }, + { + "chunk_id": "doc_33_chunk_24", + "original_index": 24, + "content": " DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString())\n .asList();\n\n" + }, + { + "chunk_id": "doc_33_chunk_25", + "original_index": 25, + "content": " assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n 
mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n" + }, + { + "chunk_id": "doc_33_chunk_26", + "original_index": 26, + "content": " @Test\n public void testCredentialHelper_exeExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.exe\").toPath();\n Path pathWithoutExe = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutExe.getParent().resolve(\"foo.exe\"));\n\n" + }, + { + "chunk_id": "doc_33_chunk_27", + "original_index": 27, + "content": " DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString())\n .asList();\n\n" + }, + { + "chunk_id": "doc_33_chunk_28", + "original_index": 28, + "content": " assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_34", + "original_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", + "content": "/*\n * Copyright 2019 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.api;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.api.buildplan.AbsoluteUnixPath;\nimport com.google.cloud.tools.jib.api.buildplan.FileEntriesLayer;\nimport com.google.cloud.tools.jib.api.buildplan.FilePermissions;\nimport com.google.common.collect.ArrayListMultimap;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.Multimap;\nimport com.google.common.io.CharStreams;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.time.Instant;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport java.util.function.BiConsumer;\nimport java.util.zip.GZIPInputStream;\nimport org.apache.commons.compress.archivers.tar.TarArchiveEntry;\nimport org.apache.commons.compress.archivers.tar.TarArchiveInputStream;\nimport org.junit.BeforeClass;\nimport org.junit.ClassRule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\n/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n @ClassRule public static final TemporaryFolder imageLocation = new TemporaryFolder();\n\n private static File imageTar;\n\n @BeforeClass\n public static void createImage()\n throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n IOException, RegistryException, ExecutionException {\n\n Path root = imageLocation.getRoot().toPath();\n Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n Path fileB = Files.createFile(root.resolve(\"fileB.txt\"));\n Path fileC = Files.createFile(root.resolve(\"fileC.txt\"));\n Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n Path subsubdir = Files.createDirectory(subdir.resolve(\"subdir\"));\n Files.createFile(subdir.resolve(\"fileD.txt\"));\n Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n imageTar = new File(imageLocation.getRoot(), \"image.tar\");\n Containerizer containerizer =\n Containerizer.to(TarImage.at(imageTar.toPath()).named(\"jib-core/reproducible\"));\n\n Jib.fromScratch()\n .setEntrypoint(\"echo\", \"Hello World\")\n .addLayer(ImmutableList.of(fileA), AbsoluteUnixPath.get(\"/app\"))\n // layer with out-of-order files\n .addLayer(ImmutableList.of(fileC, fileB), \"/app\")\n .addFileEntriesLayer(\n FileEntriesLayer.builder()\n .addEntryRecursive(subdir, AbsoluteUnixPath.get(\"/app\"))\n .build())\n .containerize(containerizer);\n }\n\n @Test\n public void testTarballStructure() throws IOException {\n // known content should produce known results\n List<String> actual = new ArrayList<>();\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n actual.add(imageEntry.getName());\n }\n }\n\n assertThat(actual)\n .containsExactly(\n \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n 
\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n \"config.json\",\n \"manifest.json\")\n .inOrder();\n }\n\n @Test\n public void testManifest() throws IOException {\n String expectedManifest =\n \"[{\\\"Config\\\":\\\"config.json\\\",\\\"RepoTags\\\":[\\\"jib-core/reproducible:latest\\\"],\"\n + \"\\\"Layers\\\":[\\\"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\\\",\\\"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\\\",\\\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\\\"]}]\";\n String generatedManifest = extractFromTarFileAsString(imageTar, \"manifest.json\");\n assertThat(generatedManifest).isEqualTo(expectedManifest);\n }\n\n @Test\n public void testConfiguration() throws IOException {\n String expectedConfig =\n \"{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\"\n + \"\\\"config\\\":{\\\"Env\\\":[],\\\"Entrypoint\\\":[\\\"echo\\\",\\\"Hello World\\\"],\\\"ExposedPorts\\\":{},\\\"Labels\\\":{},\\\"Volumes\\\":{}},\"\n + \"\\\"history\\\":[{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"}],\"\n + \"\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:18e4f44e6d1835bd968339b166057bd17ab7d4cbb56dc7262a5cafea7cf8d405\\\",\\\"sha256:13369c34f073f2b9c1fa6431e23d925f1a8eac65b1726c8cc8fcc2596c69b414\\\",\\\"sha256:4f92c507112d7880ca0f504ef8272b7fdee107263270125036a260a741565923\\\"]}}\";\n String generatedConfig = extractFromTarFileAsString(imageTar, \"config.json\");\n assertThat(generatedConfig).isEqualTo(expectedConfig);\n }\n\n @Test\n public void testImageLayout() throws IOException {\n Set<String> paths = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n paths.add(layerEntry.getName());\n }\n });\n assertThat(paths)\n .containsExactly(\n \"app/fileA.txt\",\n \"app/fileB.txt\",\n \"app/fileC.txt\",\n \"app/fileD.txt\",\n \"app/subdir/fileE.txt\");\n }\n\n @Test\n public void testAllFileAndDirectories() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) ->\n assertThat(layerEntry.isFile() || layerEntry.isDirectory()).isTrue());\n }\n\n @Test\n public void testTimestampsEpochPlus1s() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) -> {\n Instant modificationTime = layerEntry.getLastModifiedDate().toInstant();\n assertThat(modificationTime).isEqualTo(Instant.ofEpochSecond(1));\n });\n }\n\n @Test\n public void testPermissions() throws IOException {\n assertThat(FilePermissions.DEFAULT_FILE_PERMISSIONS.getPermissionBits()).isEqualTo(0644);\n assertThat(FilePermissions.DEFAULT_FOLDER_PERMISSIONS.getPermissionBits()).isEqualTo(0755);\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0644);\n } else if (layerEntry.isDirectory()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0755);\n }\n });\n }\n\n @Test\n public void testNoImplicitParentDirectories() throws IOException {\n Set<String> directories = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n String entryPath = 
layerEntry.getName();\n if (layerEntry.isDirectory()) {\n assertThat(entryPath.endsWith(\"/\")).isTrue();\n entryPath = entryPath.substring(0, entryPath.length() - 1);\n }\n\n int lastSlashPosition = entryPath.lastIndexOf('/');\n String parent = entryPath.substring(0, Math.max(0, lastSlashPosition));\n if (!parent.isEmpty()) {\n assertThat(directories.contains(parent)).isTrue();\n }\n if (layerEntry.isDirectory()) {\n directories.add(entryPath);\n }\n });\n }\n\n @Test\n public void testFileOrdering() throws IOException {\n Multimap<String, String> layerPaths = ArrayListMultimap.create();\n layerEntriesDo((layerName, layerEntry) -> layerPaths.put(layerName, layerEntry.getName()));\n for (Collection<String> paths : layerPaths.asMap().values()) {\n List<String> sorted = new ArrayList<>(paths);\n // ReproducibleLayerBuilder sorts by TarArchiveEntry::getName()\n Collections.sort(sorted);\n assertThat(paths).containsExactlyElementsIn(sorted).inOrder();\n }\n }\n\n private void layerEntriesDo(BiConsumer<String, TarArchiveEntry> layerConsumer)\n throws IOException {\n\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n String imageEntryName = imageEntry.getName();\n // assume all .tar.gz files are layers\n if (imageEntry.isFile() && imageEntryName.endsWith(\".tar.gz\")) {\n @SuppressWarnings(\"resource\") // must not close sub-streams\n TarArchiveInputStream layer = new TarArchiveInputStream(new GZIPInputStream(input));\n TarArchiveEntry layerEntry;\n while ((layerEntry = layer.getNextTarEntry()) != null) {\n layerConsumer.accept(imageEntryName, layerEntry);\n }\n }\n }\n }\n }\n\n private static String extractFromTarFileAsString(File tarFile, String filename)\n throws IOException {\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(tarFile.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n if (filename.equals(imageEntry.getName())) {\n return CharStreams.toString(new InputStreamReader(input, StandardCharsets.UTF_8));\n }\n }\n }\n throw new AssertionError(\"file not found: \" + filename);\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_34_chunk_0", + "original_index": 0, + "content": "/*\n * Copyright 2019 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n" + }, + { + "chunk_id": "doc_34_chunk_1", + "original_index": 1, + "content": "package com.google.cloud.tools.jib.api;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.api.buildplan.AbsoluteUnixPath;\nimport com.google.cloud.tools.jib.api.buildplan.FileEntriesLayer;\nimport com.google.cloud.tools.jib.api.buildplan.FilePermissions;\nimport com.google.common.collect.ArrayListMultimap;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.Multimap;\nimport com.google.common.io.CharStreams;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\n" + }, + { + "chunk_id": "doc_34_chunk_2", + "original_index": 2, + "content": "import java.nio.file.Path;\nimport java.time.Instant;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport java.util.function.BiConsumer;\nimport java.util.zip.GZIPInputStream;\nimport org.apache.commons.compress.archivers.tar.TarArchiveEntry;\nimport org.apache.commons.compress.archivers.tar.TarArchiveInputStream;\nimport org.junit.BeforeClass;\nimport org.junit.ClassRule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\n" + }, + { + "chunk_id": "doc_34_chunk_3", + "original_index": 3, + "content": "/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n @ClassRule public static final TemporaryFolder imageLocation = new TemporaryFolder();\n\n private static File imageTar;\n\n @BeforeClass\n public static void createImage()\n throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n IOException, RegistryException, ExecutionException {\n\n Path root = imageLocation.getRoot().toPath();\n Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n Path fileB = Files.createFile(root.resolve(\"fileB.txt\"));\n Path fileC = Files.createFile(root.resolve(\"fileC.txt\"));\n Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n Path subsubdir = Files.createDirectory(subdir.resolve(\"subdir\"));\n Files.createFile(subdir.resolve(\"fileD.txt\"));\n Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n" + }, + { + "chunk_id": "doc_34_chunk_4", + "original_index": 4, + "content": " imageTar = new File(imageLocation.getRoot(), \"image.tar\");\n Containerizer containerizer =\n Containerizer.to(TarImage.at(imageTar.toPath()).named(\"jib-core/reproducible\"));\n\n Jib.fromScratch()\n .setEntrypoint(\"echo\", \"Hello World\")\n .addLayer(ImmutableList.of(fileA), AbsoluteUnixPath.get(\"/app\"))\n // layer with out-of-order files\n .addLayer(ImmutableList.of(fileC, fileB), \"/app\")\n .addFileEntriesLayer(\n FileEntriesLayer.builder()\n .addEntryRecursive(subdir, AbsoluteUnixPath.get(\"/app\"))\n .build())\n .containerize(containerizer);\n }\n\n" + }, + { + "chunk_id": "doc_34_chunk_5", + "original_index": 5, + "content": " @Test\n public void testTarballStructure() throws IOException {\n // known content should produce known results\n List<String> actual = new ArrayList<>();\n try (TarArchiveInputStream input =\n new 
TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n actual.add(imageEntry.getName());\n }\n }\n\n assertThat(actual)\n .containsExactly(\n \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n \"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n \"config.json\",\n \"manifest.json\")\n .inOrder();\n }\n\n" + }, + { + "chunk_id": "doc_34_chunk_6", + "original_index": 6, + "content": " @Test\n public void testManifest() throws IOException {\n String expectedManifest =\n \"[{\\\"Config\\\":\\\"config.json\\\",\\\"RepoTags\\\":[\\\"jib-core/reproducible:latest\\\"],\"\n + \"\\\"Layers\\\":[\\\"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\\\",\\\"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\\\",\\\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\\\"]}]\";\n String generatedManifest = extractFromTarFileAsString(imageTar, \"manifest.json\");\n assertThat(generatedManifest).isEqualTo(expectedManifest);\n }\n\n" + }, + { + "chunk_id": "doc_34_chunk_7", + "original_index": 7, + "content": " @Test\n public void testConfiguration() throws IOException {\n String expectedConfig =\n \"{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\"\n + \"\\\"config\\\":{\\\"Env\\\":[],\\\"Entrypoint\\\":[\\\"echo\\\",\\\"Hello World\\\"],\\\"ExposedPorts\\\":{},\\\"Labels\\\":{},\\\"Volumes\\\":{}},\"\n + \"\\\"history\\\":[{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"}],\"\n" + }, + { + "chunk_id": "doc_34_chunk_8", + "original_index": 8, + "content": " + \"\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:18e4f44e6d1835bd968339b166057bd17ab7d4cbb56dc7262a5cafea7cf8d405\\\",\\\"sha256:13369c34f073f2b9c1fa6431e23d925f1a8eac65b1726c8cc8fcc2596c69b414\\\",\\\"sha256:4f92c507112d7880ca0f504ef8272b7fdee107263270125036a260a741565923\\\"]}}\";\n String generatedConfig = extractFromTarFileAsString(imageTar, \"config.json\");\n assertThat(generatedConfig).isEqualTo(expectedConfig);\n }\n\n @Test\n public void testImageLayout() throws IOException {\n Set<String> paths = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n paths.add(layerEntry.getName());\n }\n });\n assertThat(paths)\n .containsExactly(\n \"app/fileA.txt\",\n \"app/fileB.txt\",\n \"app/fileC.txt\",\n \"app/fileD.txt\",\n \"app/subdir/fileE.txt\");\n }\n\n" + }, + { + "chunk_id": "doc_34_chunk_9", + "original_index": 9, + "content": " @Test\n public void testAllFileAndDirectories() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) ->\n assertThat(layerEntry.isFile() || layerEntry.isDirectory()).isTrue());\n }\n\n @Test\n public void testTimestampsEpochPlus1s() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) -> {\n Instant modificationTime = layerEntry.getLastModifiedDate().toInstant();\n 
assertThat(modificationTime).isEqualTo(Instant.ofEpochSecond(1));\n });\n }\n\n" + }, + { + "chunk_id": "doc_34_chunk_10", + "original_index": 10, + "content": " @Test\n public void testPermissions() throws IOException {\n assertThat(FilePermissions.DEFAULT_FILE_PERMISSIONS.getPermissionBits()).isEqualTo(0644);\n assertThat(FilePermissions.DEFAULT_FOLDER_PERMISSIONS.getPermissionBits()).isEqualTo(0755);\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0644);\n } else if (layerEntry.isDirectory()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0755);\n }\n });\n }\n\n" + }, + { + "chunk_id": "doc_34_chunk_11", + "original_index": 11, + "content": " @Test\n public void testNoImplicitParentDirectories() throws IOException {\n Set<String> directories = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n String entryPath = layerEntry.getName();\n if (layerEntry.isDirectory()) {\n assertThat(entryPath.endsWith(\"/\")).isTrue();\n entryPath = entryPath.substring(0, entryPath.length() - 1);\n }\n\n int lastSlashPosition = entryPath.lastIndexOf('/');\n String parent = entryPath.substring(0, Math.max(0, lastSlashPosition));\n if (!parent.isEmpty()) {\n assertThat(directories.contains(parent)).isTrue();\n }\n if (layerEntry.isDirectory()) {\n directories.add(entryPath);\n }\n });\n }\n\n" + }, + { + "chunk_id": "doc_34_chunk_12", + "original_index": 12, + "content": " @Test\n public void testFileOrdering() throws IOException {\n Multimap<String, String> layerPaths = ArrayListMultimap.create();\n layerEntriesDo((layerName, layerEntry) -> layerPaths.put(layerName, layerEntry.getName()));\n for (Collection<String> paths : layerPaths.asMap().values()) {\n List<String> sorted = new ArrayList<>(paths);\n // ReproducibleLayerBuilder sorts by TarArchiveEntry::getName()\n Collections.sort(sorted);\n assertThat(paths).containsExactlyElementsIn(sorted).inOrder();\n }\n }\n\n private void layerEntriesDo(BiConsumer<String, TarArchiveEntry> layerConsumer)\n throws IOException {\n\n" + }, + { + "chunk_id": "doc_34_chunk_13", + "original_index": 13, + "content": " try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n String imageEntryName = imageEntry.getName();\n // assume all .tar.gz files are layers\n if (imageEntry.isFile() && imageEntryName.endsWith(\".tar.gz\")) {\n @SuppressWarnings(\"resource\") // must not close sub-streams\n TarArchiveInputStream layer = new TarArchiveInputStream(new GZIPInputStream(input));\n TarArchiveEntry layerEntry;\n while ((layerEntry = layer.getNextTarEntry()) != null) {\n layerConsumer.accept(imageEntryName, layerEntry);\n }\n }\n }\n }\n }\n\n" + }, + { + "chunk_id": "doc_34_chunk_14", + "original_index": 14, + "content": " private static String extractFromTarFileAsString(File tarFile, String filename)\n throws IOException {\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(tarFile.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n if (filename.equals(imageEntry.getName())) {\n return CharStreams.toString(new InputStreamReader(input, StandardCharsets.UTF_8));\n }\n }\n }\n throw new AssertionError(\"file not found: \" + filename);\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_35", + "original_uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", + "content": "/*\n * Copyright 2018 Google 
LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_35_chunk_0", + "original_index": 0, + "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_36", + "original_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", + "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. 
*/\npublic class MavenSettingsServerCredentialsTest {\n\n private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional<AuthProperty> auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional<AuthProperty> auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional<AuthProperty> auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional<AuthProperty> auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional<AuthProperty> auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. 
Should work as well.\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_36_chunk_0", + "original_index": 0, + "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n" + }, + { + "chunk_id": "doc_36_chunk_1", + "original_index": 1, + "content": "package com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. */\npublic class MavenSettingsServerCredentialsTest {\n\n" + }, + { + "chunk_id": "doc_36_chunk_2", + "original_index": 2, + "content": " private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n" + }, + { + "chunk_id": "doc_36_chunk_3", + "original_index": 3, + "content": " @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n" + }, + { + "chunk_id": "doc_36_chunk_4", + "original_index": 4, + "content": " @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", 
auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n" + }, + { + "chunk_id": "doc_36_chunk_5", + "original_index": 5, + "content": " @Test\n public void testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n" + }, + { + "chunk_id": "doc_36_chunk_6", + "original_index": 6, + "content": " @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n" + }, + { + "chunk_id": "doc_36_chunk_7", + "original_index": 7, + "content": " @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. Should work as well.\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_37", + "original_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", + "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.registry;\n\nimport com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.blob.Blob;\nimport com.google.cloud.tools.jib.event.EventHandlers;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.image.json.V22ManifestTemplate;\nimport com.google.common.io.ByteStreams;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.concurrent.atomic.LongAdder;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Test;\n\n/** Integration tests for {@link BlobPuller}. */\npublic class BlobPullerIntegrationTest {\n\n private final FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n\n @Test\n public void testPull() throws IOException, RegistryException {\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n V22ManifestTemplate manifestTemplate =\n registryClient\n .pullManifest(\n ManifestPullerIntegrationTest.KNOWN_MANIFEST_V22_SHA, V22ManifestTemplate.class)\n .getManifest();\n\n DescriptorDigest realDigest = manifestTemplate.getLayers().get(0).getDigest();\n\n // Pulls a layer BLOB of the distroless/base image.\n LongAdder totalByteCount = new LongAdder();\n LongAdder expectedSize = new LongAdder();\n Blob pulledBlob =\n registryClient.pullBlob(\n realDigest,\n size -> {\n Assert.assertEquals(0, expectedSize.sum());\n expectedSize.add(size);\n },\n totalByteCount::add);\n Assert.assertEquals(realDigest, pulledBlob.writeTo(ByteStreams.nullOutputStream()).getDigest());\n Assert.assertTrue(expectedSize.sum() > 0);\n Assert.assertEquals(expectedSize.sum(), totalByteCount.sum());\n }\n\n @Test\n public void testPull_unknownBlob() throws IOException, DigestException {\n DescriptorDigest nonexistentDigest =\n DescriptorDigest.fromHash(\n \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\");\n\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n\n try {\n registryClient\n .pullBlob(nonexistentDigest, ignored -> {}, ignored -> {})\n .writeTo(ByteStreams.nullOutputStream());\n Assert.fail(\"Trying to pull nonexistent blob should have errored\");\n\n } catch (IOException ex) {\n if (!(ex.getCause() instanceof RegistryErrorException)) {\n throw ex;\n }\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"pull BLOB for gcr.io/distroless/base with digest \" + nonexistentDigest));\n }\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_37_chunk_0", + "original_index": 0, + "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.registry;\n\n" + }, + { + "chunk_id": "doc_37_chunk_1", + "original_index": 1, + "content": "import com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.blob.Blob;\nimport com.google.cloud.tools.jib.event.EventHandlers;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.image.json.V22ManifestTemplate;\nimport com.google.common.io.ByteStreams;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.concurrent.atomic.LongAdder;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Test;\n\n" + }, + { + "chunk_id": "doc_37_chunk_2", + "original_index": 2, + "content": "/** Integration tests for {@link BlobPuller}. */\npublic class BlobPullerIntegrationTest {\n\n private final FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n\n @Test\n public void testPull() throws IOException, RegistryException {\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n V22ManifestTemplate manifestTemplate =\n registryClient\n .pullManifest(\n ManifestPullerIntegrationTest.KNOWN_MANIFEST_V22_SHA, V22ManifestTemplate.class)\n .getManifest();\n\n DescriptorDigest realDigest = manifestTemplate.getLayers().get(0).getDigest();\n\n" + }, + { + "chunk_id": "doc_37_chunk_3", + "original_index": 3, + "content": " // Pulls a layer BLOB of the distroless/base image.\n LongAdder totalByteCount = new LongAdder();\n LongAdder expectedSize = new LongAdder();\n Blob pulledBlob =\n registryClient.pullBlob(\n realDigest,\n size -> {\n Assert.assertEquals(0, expectedSize.sum());\n expectedSize.add(size);\n },\n totalByteCount::add);\n Assert.assertEquals(realDigest, pulledBlob.writeTo(ByteStreams.nullOutputStream()).getDigest());\n Assert.assertTrue(expectedSize.sum() > 0);\n Assert.assertEquals(expectedSize.sum(), totalByteCount.sum());\n }\n\n" + }, + { + "chunk_id": "doc_37_chunk_4", + "original_index": 4, + "content": " @Test\n public void testPull_unknownBlob() throws IOException, DigestException {\n DescriptorDigest nonexistentDigest =\n DescriptorDigest.fromHash(\n \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\");\n\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n\n try {\n registryClient\n .pullBlob(nonexistentDigest, ignored -> {}, ignored -> {})\n .writeTo(ByteStreams.nullOutputStream());\n Assert.fail(\"Trying to pull nonexistent blob should have errored\");\n\n } catch (IOException ex) {\n if (!(ex.getCause() instanceof RegistryErrorException)) {\n throw ex;\n }\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"pull BLOB for gcr.io/distroless/base with digest \" + nonexistentDigest));\n }\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_38", + "original_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", + "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. 
You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\nimport com.google.api.client.http.HttpResponseException;\nimport com.google.api.client.http.HttpStatusCodes;\nimport com.google.cloud.tools.jib.api.CacheDirectoryCreationException;\nimport com.google.cloud.tools.jib.api.Containerizer;\nimport com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.ImageReference;\nimport com.google.cloud.tools.jib.api.InsecureRegistryException;\nimport com.google.cloud.tools.jib.api.JibContainer;\nimport com.google.cloud.tools.jib.api.JibContainerBuilder;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.api.RegistryUnauthorizedException;\nimport com.google.cloud.tools.jib.registry.RegistryCredentialsNotSentException;\nimport com.google.common.collect.ImmutableSet;\nimport java.io.IOException;\nimport java.net.UnknownHostException;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport org.apache.http.conn.HttpHostConnectException;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.Mockito;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n/** Tests for {@link JibBuildRunner}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class JibBuildRunnerTest {\n\n private static final HelpfulSuggestions TEST_HELPFUL_SUGGESTIONS =\n new HelpfulSuggestions(\n \"messagePrefix\", \"clearCacheCommand\", \"toConfig\", \"toFlag\", \"buildFile\");\n\n @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Mock private JibContainerBuilder mockJibContainerBuilder;\n @Mock private JibContainer mockJibContainer;\n @Mock private Containerizer mockContainerizer;\n @Mock private RegistryUnauthorizedException mockRegistryUnauthorizedException;\n @Mock private RegistryCredentialsNotSentException mockRegistryCredentialsNotSentException;\n @Mock private HttpResponseException mockHttpResponseException;\n\n private JibBuildRunner testJibBuildRunner;\n\n @Before\n public void setUpMocks() {\n testJibBuildRunner =\n new JibBuildRunner(\n mockJibContainerBuilder,\n mockContainerizer,\n ignored -> {},\n TEST_HELPFUL_SUGGESTIONS,\n \"ignored\",\n \"ignored\");\n }\n\n @Test\n public void testBuildImage_pass()\n throws BuildStepsExecutionException, IOException, CacheDirectoryCreationException {\n JibContainer buildResult = testJibBuildRunner.runBuild();\n Assert.assertNull(buildResult);\n }\n\n @Test\n public void testBuildImage_httpHostConnectException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n HttpHostConnectException mockHttpHostConnectException =\n Mockito.mock(HttpHostConnectException.class);\n Mockito.doThrow(mockHttpHostConnectException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forHttpHostConnect(), ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_unknownHostException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n UnknownHostException mockUnknownHostException = Mockito.mock(UnknownHostException.class);\n Mockito.doThrow(mockUnknownHostException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forUnknownHost(), ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_insecureRegistryException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n InsecureRegistryException mockInsecureRegistryException =\n Mockito.mock(InsecureRegistryException.class);\n Mockito.doThrow(mockInsecureRegistryException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forInsecureRegistry(), ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_registryUnauthorizedException_statusCodeForbidden()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode())\n 
.thenReturn(HttpStatusCodes.STATUS_CODE_FORBIDDEN);\n\n Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forHttpStatusCodeForbidden(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_registryUnauthorizedException_noCredentials()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode()).thenReturn(-1); // Unknown\n\n Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forNoCredentialsDefined(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_registryCredentialsNotSentException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.doThrow(mockRegistryCredentialsNotSentException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forCredentialsNotSent(), ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_other()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.doThrow(new RegistryException(\"messagePrefix\"))\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.none(), ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_writesImageJson() throws Exception {\n final ImageReference targetImageReference = ImageReference.parse(\"gcr.io/distroless/java:11\");\n final String imageId =\n \"sha256:61bb3ec31a47cb730eb58a38bbfa813761a51dca69d10e39c24c3d00a7b2c7a9\";\n final String digest = \"sha256:3f1be7e19129edb202c071a659a4db35280ab2bb1a16f223bfd5d1948657b6fc\";\n final Set tags = ImmutableSet.of(\"latest\", \"0.1.41-69d10e-20200116T101403\");\n\n final Path outputPath = temporaryFolder.newFile(\"jib-image.json\").toPath();\n\n Mockito.when(mockJibContainer.getTargetImage()).thenReturn(targetImageReference);\n Mockito.when(mockJibContainer.getImageId()).thenReturn(DescriptorDigest.fromDigest(imageId));\n Mockito.when(mockJibContainer.getDigest()).thenReturn(DescriptorDigest.fromDigest(digest));\n Mockito.when(mockJibContainer.getTags()).thenReturn(tags);\n Mockito.when(mockJibContainerBuilder.containerize(mockContainerizer))\n .thenReturn(mockJibContainer);\n Mockito.when(mockJibContainer.isImagePushed()).thenReturn(true);\n testJibBuildRunner.writeImageJson(outputPath).runBuild();\n\n final String outputJson = new String(Files.readAllBytes(outputPath), StandardCharsets.UTF_8);\n final 
ImageMetadataOutput metadataOutput = ImageMetadataOutput.fromJson(outputJson);\n Assert.assertEquals(targetImageReference.toString(), metadataOutput.getImage());\n Assert.assertEquals(imageId, metadataOutput.getImageId());\n Assert.assertEquals(digest, metadataOutput.getImageDigest());\n Assert.assertEquals(tags, ImmutableSet.copyOf(metadataOutput.getTags()));\n Assert.assertTrue(metadataOutput.isImagePushed());\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_38_chunk_0", + "original_index": 0, + "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\n" + }, + { + "chunk_id": "doc_38_chunk_1", + "original_index": 1, + "content": "import com.google.api.client.http.HttpResponseException;\nimport com.google.api.client.http.HttpStatusCodes;\nimport com.google.cloud.tools.jib.api.CacheDirectoryCreationException;\nimport com.google.cloud.tools.jib.api.Containerizer;\nimport com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.ImageReference;\nimport com.google.cloud.tools.jib.api.InsecureRegistryException;\nimport com.google.cloud.tools.jib.api.JibContainer;\nimport com.google.cloud.tools.jib.api.JibContainerBuilder;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.api.RegistryUnauthorizedException;\nimport com.google.cloud.tools.jib.registry.RegistryCredentialsNotSentException;\nimport com.google.common.collect.ImmutableSet;\nimport java.io.IOException;\nimport java.net.UnknownHostException;\n" + }, + { + "chunk_id": "doc_38_chunk_2", + "original_index": 2, + "content": "import java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport org.apache.http.conn.HttpHostConnectException;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.Mockito;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n" + }, + { + "chunk_id": "doc_38_chunk_3", + "original_index": 3, + "content": "/** Tests for {@link JibBuildRunner}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class JibBuildRunnerTest {\n\n private static final HelpfulSuggestions TEST_HELPFUL_SUGGESTIONS =\n new HelpfulSuggestions(\n \"messagePrefix\", \"clearCacheCommand\", \"toConfig\", \"toFlag\", \"buildFile\");\n\n @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Mock private JibContainerBuilder mockJibContainerBuilder;\n @Mock private JibContainer mockJibContainer;\n @Mock private Containerizer mockContainerizer;\n @Mock private RegistryUnauthorizedException mockRegistryUnauthorizedException;\n @Mock private RegistryCredentialsNotSentException mockRegistryCredentialsNotSentException;\n @Mock private HttpResponseException mockHttpResponseException;\n\n" + }, + { + "chunk_id": "doc_38_chunk_4", + "original_index": 4, + "content": " private JibBuildRunner testJibBuildRunner;\n\n @Before\n public void setUpMocks() {\n testJibBuildRunner =\n new JibBuildRunner(\n mockJibContainerBuilder,\n mockContainerizer,\n ignored -> {},\n TEST_HELPFUL_SUGGESTIONS,\n \"ignored\",\n \"ignored\");\n }\n\n @Test\n public void testBuildImage_pass()\n throws BuildStepsExecutionException, IOException, CacheDirectoryCreationException {\n JibContainer buildResult = testJibBuildRunner.runBuild();\n Assert.assertNull(buildResult);\n }\n\n" + }, + { + "chunk_id": "doc_38_chunk_5", + "original_index": 5, + "content": " @Test\n public void testBuildImage_httpHostConnectException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n HttpHostConnectException mockHttpHostConnectException =\n Mockito.mock(HttpHostConnectException.class);\n Mockito.doThrow(mockHttpHostConnectException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forHttpHostConnect(), ex.getMessage());\n }\n }\n\n" + }, + { + "chunk_id": "doc_38_chunk_6", + "original_index": 6, + "content": " @Test\n public void testBuildImage_unknownHostException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n UnknownHostException mockUnknownHostException = Mockito.mock(UnknownHostException.class);\n Mockito.doThrow(mockUnknownHostException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forUnknownHost(), ex.getMessage());\n }\n }\n\n" + }, + { + "chunk_id": "doc_38_chunk_7", + "original_index": 7, + "content": " @Test\n public void testBuildImage_insecureRegistryException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n InsecureRegistryException mockInsecureRegistryException =\n Mockito.mock(InsecureRegistryException.class);\n Mockito.doThrow(mockInsecureRegistryException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forInsecureRegistry(), ex.getMessage());\n }\n }\n\n" + }, + { + "chunk_id": "doc_38_chunk_8", + "original_index": 8, + "content": " @Test\n public void testBuildImage_registryUnauthorizedException_statusCodeForbidden()\n throws InterruptedException, 
IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode())\n .thenReturn(HttpStatusCodes.STATUS_CODE_FORBIDDEN);\n\n" + }, + { + "chunk_id": "doc_38_chunk_9", + "original_index": 9, + "content": " Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forHttpStatusCodeForbidden(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n" + }, + { + "chunk_id": "doc_38_chunk_10", + "original_index": 10, + "content": " @Test\n public void testBuildImage_registryUnauthorizedException_noCredentials()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode()).thenReturn(-1); // Unknown\n\n Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forNoCredentialsDefined(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n" + }, + { + "chunk_id": "doc_38_chunk_11", + "original_index": 11, + "content": " @Test\n public void testBuildImage_registryCredentialsNotSentException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.doThrow(mockRegistryCredentialsNotSentException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forCredentialsNotSent(), ex.getMessage());\n }\n }\n\n" + }, + { + "chunk_id": "doc_38_chunk_12", + "original_index": 12, + "content": " @Test\n public void testBuildImage_other()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.doThrow(new RegistryException(\"messagePrefix\"))\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.none(), ex.getMessage());\n }\n }\n\n" + }, + { + "chunk_id": "doc_38_chunk_13", + "original_index": 13, + "content": " @Test\n public void testBuildImage_writesImageJson() throws Exception {\n final ImageReference targetImageReference = ImageReference.parse(\"gcr.io/distroless/java:11\");\n final String imageId =\n \"sha256:61bb3ec31a47cb730eb58a38bbfa813761a51dca69d10e39c24c3d00a7b2c7a9\";\n final String digest = \"sha256:3f1be7e19129edb202c071a659a4db35280ab2bb1a16f223bfd5d1948657b6fc\";\n final Set tags = ImmutableSet.of(\"latest\", 
\"0.1.41-69d10e-20200116T101403\");\n\n final Path outputPath = temporaryFolder.newFile(\"jib-image.json\").toPath();\n\n" + }, + { + "chunk_id": "doc_38_chunk_14", + "original_index": 14, + "content": " Mockito.when(mockJibContainer.getTargetImage()).thenReturn(targetImageReference);\n Mockito.when(mockJibContainer.getImageId()).thenReturn(DescriptorDigest.fromDigest(imageId));\n Mockito.when(mockJibContainer.getDigest()).thenReturn(DescriptorDigest.fromDigest(digest));\n Mockito.when(mockJibContainer.getTags()).thenReturn(tags);\n Mockito.when(mockJibContainerBuilder.containerize(mockContainerizer))\n .thenReturn(mockJibContainer);\n Mockito.when(mockJibContainer.isImagePushed()).thenReturn(true);\n testJibBuildRunner.writeImageJson(outputPath).runBuild();\n\n" + }, + { + "chunk_id": "doc_38_chunk_15", + "original_index": 15, + "content": " final String outputJson = new String(Files.readAllBytes(outputPath), StandardCharsets.UTF_8);\n final ImageMetadataOutput metadataOutput = ImageMetadataOutput.fromJson(outputJson);\n Assert.assertEquals(targetImageReference.toString(), metadataOutput.getImage());\n Assert.assertEquals(imageId, metadataOutput.getImageId());\n Assert.assertEquals(digest, metadataOutput.getImageDigest());\n Assert.assertEquals(tags, ImmutableSet.copyOf(metadataOutput.getTags()));\n Assert.assertTrue(metadataOutput.isImagePushed());\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_39", + "original_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", + "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.maven;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.Command;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.Arrays;\nimport org.apache.maven.it.VerificationException;\nimport org.apache.maven.it.Verifier;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Assume;\nimport org.junit.ClassRule;\nimport org.junit.Test;\n\n/** Integration tests for {@link BuildDockerMojo}. 
*/\npublic class BuildDockerMojoIntegrationTest {\n\n @ClassRule public static final TestProject simpleTestProject = new TestProject(\"simple\");\n\n @ClassRule public static final TestProject emptyTestProject = new TestProject(\"empty\");\n\n @ClassRule\n public static final TestProject defaultTargetTestProject = new TestProject(\"default-target\");\n\n private static void buildToDockerDaemon(TestProject project, String imageReference, String pomXml)\n throws VerificationException, DigestException, IOException {\n Verifier verifier = new Verifier(project.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", imageReference);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=\" + pomXml);\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyErrorFreeLog();\n\n BuildImageMojoIntegrationTest.readDigestFile(\n project.getProjectRoot().resolve(\"target/jib-image.digest\"));\n }\n\n /**\n * Builds and runs jib:buildDocker on a project at {@code projectRoot} pushing to {@code\n * imageReference}.\n */\n private static String buildToDockerDaemonAndRun(TestProject project, String imageReference)\n throws VerificationException, IOException, InterruptedException, DigestException {\n buildToDockerDaemon(project, imageReference, \"pom.xml\");\n\n String dockerInspectVolumes =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Volumes}}'\", imageReference).run();\n String dockerInspectExposedPorts =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.ExposedPorts}}'\", imageReference)\n .run();\n String dockerInspectLabels =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Labels}}'\", imageReference).run();\n String history = new Command(\"docker\", \"history\", imageReference).run();\n\n MatcherAssert.assertThat(\n dockerInspectVolumes, CoreMatchers.containsString(\"\\\"/var/log\\\":{},\\\"/var/log2\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectExposedPorts,\n CoreMatchers.containsString(\n \"\\\"1000/tcp\\\":{},\\\"2000/udp\\\":{},\\\"2001/udp\\\":{},\\\"2002/udp\\\":{},\\\"2003/udp\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectLabels,\n CoreMatchers.containsString(\"\\\"key1\\\":\\\"value1\\\",\\\"key2\\\":\\\"value2\\\"\"));\n\n return new Command(\"docker\", \"run\", \"--rm\", imageReference).run();\n }\n\n @Test\n public void testExecute_simple()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n Assert.assertEquals(\n \"Hello, world. 
An argument.\\n1970-01-01T00:00:01Z\\nrw-r--r--\\nrw-r--r--\\nfoo\\ncat\\n\"\n + \"1970-01-01T00:00:01Z\\n1970-01-01T00:00:01Z\\n\",\n buildToDockerDaemonAndRun(simpleTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_simple_extraDirectoriesFiltering()\n throws DigestException, IOException, InterruptedException, VerificationException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-extra-dirs-filtering.xml\");\n String output =\n new Command(\"docker\", \"run\", \"--rm\", \"--entrypoint=ls\", targetImage, \"-1R\", \"/extras\")\n .run();\n\n // /extras/cat.txt\n // /extras/foo\n // /extras/sub/\n // /extras/sub/a.json\n assertThat(output).isEqualTo(\"/extras:\\ncat.txt\\nfoo\\nsub\\n\\n/extras/sub:\\na.json\\n\");\n }\n\n @Test\n public void testExecute_dockerClient()\n throws VerificationException, IOException, InterruptedException {\n Assume.assumeFalse(System.getProperty(\"os.name\").startsWith(\"Windows\"));\n new Command(\n \"chmod\", \"+x\", simpleTestProject.getProjectRoot().resolve(\"mock-docker.sh\").toString())\n .run();\n\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-dockerclient.xml\");\n verifier.addCliOption(\"--debug\");\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyTextInLog(\"Docker load called. value1 value2\");\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_empty()\n throws InterruptedException, IOException, VerificationException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n\n Assert.assertEquals(\"\", buildToDockerDaemonAndRun(emptyTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_defaultTarget()\n throws VerificationException, IOException, InterruptedException, DigestException {\n Assert.assertEquals(\n \"Hello, world. 
An argument.\\n\",\n buildToDockerDaemonAndRun(\n defaultTargetTestProject, \"default-target-name:default-target-version\"));\n }\n\n @Test\n public void testExecute_jibSkip() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibSkip(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_jibContainerizeSkips() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibContainerizeSkips(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_userNumeric()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom.xml\");\n Assert.assertEquals(\n \"12345:54321\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_userNames()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"brokenuserimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom-broken-user.xml\");\n Assert.assertEquals(\n \"myuser:mygroup\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_noToImageAndInvalidProjectName()\n throws DigestException, VerificationException, IOException, InterruptedException {\n buildToDockerDaemon(simpleTestProject, \"image reference ignored\", \"pom-no-to-image.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", \"my-artifact-id:1\").run());\n }\n\n @Test\n public void testExecute_jarContainerization()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"jarcontainerizationimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-jar-containerization.xml\");\n Assert.assertEquals(\n \"Hello, world. \\nImplementation-Title: hello-world\\nImplementation-Version: 1\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testExecute_jarContainerizationOnMissingJar() throws IOException {\n try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"_TARGET_IMAGE\", \"erroronmissingjar\");\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-jar-containerization.xml\");\n verifier.executeGoals(Arrays.asList(\"clean\", \"jib:dockerBuild\"));\n Assert.fail();\n\n } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"Obtaining project build output files failed; make sure you have packaged your \"\n + \"project before trying to build the image. 
(Did you accidentally run \\\"mvn \"\n + \"clean jib:build\\\" instead of \\\"mvn clean package jib:build\\\"?)\"));\n }\n }\n\n @Test\n public void testExecute_jibRequireVersion_ok() throws VerificationException, IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n // this plugin should match 1.0\n verifier.setSystemProperty(\"jib.requiredVersion\", \"1.0\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_jibRequireVersion_fail() throws IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.requiredVersion\", \"[,1.0]\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n Assert.fail();\n } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(), CoreMatchers.containsString(\"but is required to be [,1.0]\"));\n }\n }\n\n @Test\n public void testCredHelperConfigurationSimple()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-1.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testCredHelperConfigurationComplex()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-2.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_39_chunk_0", + "original_index": 0, + "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n" + }, + { + "chunk_id": "doc_39_chunk_1", + "original_index": 1, + "content": "package com.google.cloud.tools.jib.maven;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.Command;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.Arrays;\nimport org.apache.maven.it.VerificationException;\nimport org.apache.maven.it.Verifier;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Assume;\nimport org.junit.ClassRule;\nimport org.junit.Test;\n\n/** Integration tests for {@link BuildDockerMojo}. 
*/\npublic class BuildDockerMojoIntegrationTest {\n\n @ClassRule public static final TestProject simpleTestProject = new TestProject(\"simple\");\n\n @ClassRule public static final TestProject emptyTestProject = new TestProject(\"empty\");\n\n @ClassRule\n public static final TestProject defaultTargetTestProject = new TestProject(\"default-target\");\n\n" + }, + { + "chunk_id": "doc_39_chunk_2", + "original_index": 2, + "content": " private static void buildToDockerDaemon(TestProject project, String imageReference, String pomXml)\n throws VerificationException, DigestException, IOException {\n Verifier verifier = new Verifier(project.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", imageReference);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=\" + pomXml);\n verifier.executeGoal(\"package\");\n\n" + }, + { + "chunk_id": "doc_39_chunk_3", + "original_index": 3, + "content": " verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyErrorFreeLog();\n\n BuildImageMojoIntegrationTest.readDigestFile(\n project.getProjectRoot().resolve(\"target/jib-image.digest\"));\n }\n\n /**\n * Builds and runs jib:buildDocker on a project at {@code projectRoot} pushing to {@code\n * imageReference}.\n */\n private static String buildToDockerDaemonAndRun(TestProject project, String imageReference)\n throws VerificationException, IOException, InterruptedException, DigestException {\n buildToDockerDaemon(project, imageReference, \"pom.xml\");\n\n" + }, + { + "chunk_id": "doc_39_chunk_4", + "original_index": 4, + "content": " String dockerInspectVolumes =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Volumes}}'\", imageReference).run();\n String dockerInspectExposedPorts =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.ExposedPorts}}'\", imageReference)\n .run();\n String dockerInspectLabels =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Labels}}'\", imageReference).run();\n String history = new Command(\"docker\", \"history\", imageReference).run();\n\n MatcherAssert.assertThat(\n dockerInspectVolumes, CoreMatchers.containsString(\"\\\"/var/log\\\":{},\\\"/var/log2\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectExposedPorts,\n CoreMatchers.containsString(\n \"\\\"1000/tcp\\\":{},\\\"2000/udp\\\":{},\\\"2001/udp\\\":{},\\\"2002/udp\\\":{},\\\"2003/udp\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectLabels,\n CoreMatchers.containsString(\"\\\"key1\\\":\\\"value1\\\",\\\"key2\\\":\\\"value2\\\"\"));\n\n" + }, + { + "chunk_id": "doc_39_chunk_5", + "original_index": 5, + "content": " return new Command(\"docker\", \"run\", \"--rm\", imageReference).run();\n }\n\n @Test\n public void testExecute_simple()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n Assert.assertEquals(\n \"Hello, world. 
An argument.\\n1970-01-01T00:00:01Z\\nrw-r--r--\\nrw-r--r--\\nfoo\\ncat\\n\"\n + \"1970-01-01T00:00:01Z\\n1970-01-01T00:00:01Z\\n\",\n buildToDockerDaemonAndRun(simpleTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n" + }, + { + "chunk_id": "doc_39_chunk_6", + "original_index": 6, + "content": " @Test\n public void testExecute_simple_extraDirectoriesFiltering()\n throws DigestException, IOException, InterruptedException, VerificationException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-extra-dirs-filtering.xml\");\n String output =\n new Command(\"docker\", \"run\", \"--rm\", \"--entrypoint=ls\", targetImage, \"-1R\", \"/extras\")\n .run();\n\n // /extras/cat.txt\n // /extras/foo\n // /extras/sub/\n // /extras/sub/a.json\n assertThat(output).isEqualTo(\"/extras:\\ncat.txt\\nfoo\\nsub\\n\\n/extras/sub:\\na.json\\n\");\n }\n\n @Test\n public void testExecute_dockerClient()\n throws VerificationException, IOException, InterruptedException {\n Assume.assumeFalse(System.getProperty(\"os.name\").startsWith(\"Windows\"));\n new Command(\n \"chmod\", \"+x\", simpleTestProject.getProjectRoot().resolve(\"mock-docker.sh\").toString())\n .run();\n\n" + }, + { + "chunk_id": "doc_39_chunk_7", + "original_index": 7, + "content": " String targetImage = \"simpleimage:maven\" + System.nanoTime();\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-dockerclient.xml\");\n verifier.addCliOption(\"--debug\");\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyTextInLog(\"Docker load called. value1 value2\");\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_empty()\n throws InterruptedException, IOException, VerificationException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n\n" + }, + { + "chunk_id": "doc_39_chunk_8", + "original_index": 8, + "content": " Assert.assertEquals(\"\", buildToDockerDaemonAndRun(emptyTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_defaultTarget()\n throws VerificationException, IOException, InterruptedException, DigestException {\n Assert.assertEquals(\n \"Hello, world. 
An argument.\\n\",\n buildToDockerDaemonAndRun(\n defaultTargetTestProject, \"default-target-name:default-target-version\"));\n }\n\n" + }, + { + "chunk_id": "doc_39_chunk_9", + "original_index": 9, + "content": " @Test\n public void testExecute_jibSkip() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibSkip(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_jibContainerizeSkips() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibContainerizeSkips(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_userNumeric()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom.xml\");\n Assert.assertEquals(\n \"12345:54321\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n" + }, + { + "chunk_id": "doc_39_chunk_10", + "original_index": 10, + "content": " @Test\n public void testExecute_userNames()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"brokenuserimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom-broken-user.xml\");\n Assert.assertEquals(\n \"myuser:mygroup\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_noToImageAndInvalidProjectName()\n throws DigestException, VerificationException, IOException, InterruptedException {\n buildToDockerDaemon(simpleTestProject, \"image reference ignored\", \"pom-no-to-image.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", \"my-artifact-id:1\").run());\n }\n\n" + }, + { + "chunk_id": "doc_39_chunk_11", + "original_index": 11, + "content": " @Test\n public void testExecute_jarContainerization()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"jarcontainerizationimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-jar-containerization.xml\");\n Assert.assertEquals(\n \"Hello, world. \\nImplementation-Title: hello-world\\nImplementation-Version: 1\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testExecute_jarContainerizationOnMissingJar() throws IOException {\n try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"_TARGET_IMAGE\", \"erroronmissingjar\");\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-jar-containerization.xml\");\n verifier.executeGoals(Arrays.asList(\"clean\", \"jib:dockerBuild\"));\n Assert.fail();\n\n" + }, + { + "chunk_id": "doc_39_chunk_12", + "original_index": 12, + "content": " } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"Obtaining project build output files failed; make sure you have packaged your \"\n + \"project before trying to build the image. 
(Did you accidentally run \\\"mvn \"\n + \"clean jib:build\\\" instead of \\\"mvn clean package jib:build\\\"?)\"));\n }\n }\n\n @Test\n public void testExecute_jibRequireVersion_ok() throws VerificationException, IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n" + }, + { + "chunk_id": "doc_39_chunk_13", + "original_index": 13, + "content": " Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n // this plugin should match 1.0\n verifier.setSystemProperty(\"jib.requiredVersion\", \"1.0\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_jibRequireVersion_fail() throws IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n" + }, + { + "chunk_id": "doc_39_chunk_14", + "original_index": 14, + "content": " try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.requiredVersion\", \"[,1.0]\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n Assert.fail();\n } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(), CoreMatchers.containsString(\"but is required to be [,1.0]\"));\n }\n }\n\n" + }, + { + "chunk_id": "doc_39_chunk_15", + "original_index": 15, + "content": " @Test\n public void testCredHelperConfigurationSimple()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-1.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testCredHelperConfigurationComplex()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-2.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_40", + "original_uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", + "content": "/*\n * Copyright 2017 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.api;\n\nimport java.text.MessageFormat;\n\n/** Thrown because registry authentication failed. 
+ { + "doc_id": "doc_40", + "original_uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", + "content": "/*\n * Copyright 2017 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.api;\n\nimport java.text.MessageFormat;\n\n/** Thrown because registry authentication failed. */\npublic class RegistryAuthenticationFailedException extends RegistryException {\n\n private static final String REASON = \"Failed to authenticate with registry {0}/{1} because: {2}\";\n private final String serverUrl;\n private final String imageName;\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param cause the underlying cause that triggered this exception\n */\n public RegistryAuthenticationFailedException(\n String serverUrl, String imageName, Throwable cause) {\n super(MessageFormat.format(REASON, serverUrl, imageName, cause.getMessage()), cause);\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param reason the underlying reason that triggered this exception\n */\n public RegistryAuthenticationFailedException(String serverUrl, String imageName, String reason) {\n super(MessageFormat.format(REASON, serverUrl, imageName, reason));\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n /**\n * The server being authenticated.\n *\n * @return the server being authenticated\n */\n public String getServerUrl() {\n return serverUrl;\n }\n\n /**\n * The image being authenticated.\n *\n * @return the image being authenticated\n */\n public String getImageName() {\n return imageName;\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_40_chunk_0", + "original_index": 0, + "content": "/*\n * Copyright 2017 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n" + }, + { + "chunk_id": "doc_40_chunk_1", + "original_index": 1, + "content": "package com.google.cloud.tools.jib.api;\n\nimport java.text.MessageFormat;\n\n/** Thrown because registry authentication failed. */\npublic class RegistryAuthenticationFailedException extends RegistryException {\n\n private static final String REASON = \"Failed to authenticate with registry {0}/{1} because: {2}\";\n private final String serverUrl;\n private final String imageName;\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param cause the underlying cause that triggered this exception\n */\n public RegistryAuthenticationFailedException(\n String serverUrl, String imageName, Throwable cause) {\n super(MessageFormat.format(REASON, serverUrl, imageName, cause.getMessage()), cause);\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n" + }, + { + "chunk_id": "doc_40_chunk_2", + "original_index": 2, + "content": " /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param reason the underlying reason that triggered this exception\n */\n public RegistryAuthenticationFailedException(String serverUrl, String imageName, String reason) {\n super(MessageFormat.format(REASON, serverUrl, imageName, reason));\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n /**\n * The server being authenticated.\n *\n * @return the server being authenticated\n */\n public String getServerUrl() {\n return serverUrl;\n }\n\n /**\n * The image being authenticated.\n *\n * @return the image being authenticated\n */\n public String getImageName() {\n return imageName;\n }\n}\n" + } + ] + },
+ { + "doc_id": "doc_41", + "original_uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", + "content": "package com.password4j;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\npublic class PepperGeneratorTest\n{\n\n @Test\n public void testSaltLength()\n {\n // GIVEN\n int length = 23;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n\n @Test\n public void testSaltNoLength()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.generate();\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(24, pepper.length());\n }\n\n @Test(expected = BadParametersException.class)\n public void testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_41_chunk_0", + "original_index": 0, + "content": "package com.password4j;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\npublic class PepperGeneratorTest\n{\n\n @Test\n public void testSaltLength()\n {\n // GIVEN\n int length = 23;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n\n @Test\n public void testSaltNoLength()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.generate();\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(24, pepper.length());\n }\n\n" + }, + { + "chunk_id": "doc_41_chunk_1", + "original_index": 1, + "content": " @Test(expected = BadParametersException.class)\n public void testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n" + } + ] + },
+ { + "doc_id": "doc_42", + "original_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\n/**\n * Class in the hierarchy to avoid code duplication.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic abstract class AbstractHashingFunction implements HashingFunction\n{\n\n /**\n * Compares two {@link CharSequence}s as byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first CharSequence\n * @param b the second CharSequence\n * @return true if both {@link CharSequence}s are the same, false if not\n */\n protected static boolean slowEquals(CharSequence a, CharSequence b)\n {\n return slowEquals(Utils.fromCharSequenceToBytes(a), Utils.fromCharSequenceToBytes(b));\n }\n\n /**\n * Compares two byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first byte array\n * @param b the second byte array\n * @return true if both byte arrays are the same, false if not\n */\n protected static boolean slowEquals(byte[] a, byte[] b)\n {\n int diff = a.length ^ b.length;\n for (int i = 0; i < a.length && i < b.length; i++)\n {\n diff |= a[i] ^ b[i];\n }\n return diff == 0;\n }\n\n @Override\n public Hash hash(CharSequence plainTextPassword, String salt, CharSequence pepper)\n {\n CharSequence peppered = Utils.append(pepper, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n @Override\n public Hash hash(byte[] plainTextPassword, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n byte[] peppered = Utils.append(pepperAsBytes, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n * <p>\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 0.1.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n * <p>\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password as bytes array\n * @param hashed the hash as bytes array\n * @param salt the salt as bytes array used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.5.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt, CharSequence pepper)\n {\n return check(Utils.append(pepper, plainTextPassword), hashed, salt);\n }\n\n /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n return check(Utils.append(pepperAsBytes, plainTextPassword), hashed, salt);\n }\n\n}\n", + "chunks": [ + { + "chunk_id": "doc_42_chunk_0", + "original_index": 0, + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\n" + }, + { + "chunk_id": "doc_42_chunk_1", + "original_index": 1, + "content": "package com.password4j;\n\n/**\n * Class in the hierarchy to avoid code duplication.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic abstract class AbstractHashingFunction implements HashingFunction\n{\n\n /**\n * Compares two {@link CharSequence}s as byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first CharSequence\n * @param b the second CharSequence\n * @return true if both {@link CharSequence}s are the same, false if not\n */\n protected static boolean slowEquals(CharSequence a, CharSequence b)\n {\n return slowEquals(Utils.fromCharSequenceToBytes(a), Utils.fromCharSequenceToBytes(b));\n }\n\n" + }, + { + "chunk_id": "doc_42_chunk_2", + "original_index": 2, + "content": " /**\n * Compares two byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first byte array\n * @param b the second byte array\n * @return true if both byte arrays are the same, false if not\n */\n protected static boolean slowEquals(byte[] a, byte[] b)\n {\n int diff = a.length ^ b.length;\n for (int i = 0; i < a.length && i < b.length; i++)\n {\n diff |= a[i] ^ b[i];\n }\n return diff == 0;\n }\n\n" + }, + { + "chunk_id": "doc_42_chunk_3", + "original_index": 3, + "content": " @Override\n public Hash hash(CharSequence plainTextPassword, String salt, CharSequence pepper)\n {\n CharSequence peppered = Utils.append(pepper, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n @Override\n public Hash hash(byte[] plainTextPassword, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n byte[] peppered = Utils.append(pepperAsBytes, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n" + }, + { + "chunk_id": "doc_42_chunk_4", + "original_index": 4, + "content": " /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n * <p>\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 0.1.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n" + }, + { + "chunk_id": "doc_42_chunk_5", + "original_index": 5, + "content": " /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n * <p>\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password as bytes array\n * @param hashed the hash as bytes array\n * @param salt the salt as bytes array used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n" + }, + { + "chunk_id": "doc_42_chunk_6", + "original_index": 6, + "content": " /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.5.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt, CharSequence pepper)\n {\n return check(Utils.append(pepper, plainTextPassword), hashed, salt);\n }\n\n" + }, + { + "chunk_id": "doc_42_chunk_7", + "original_index": 7, + "content": " /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n return check(Utils.append(pepperAsBytes, plainTextPassword), hashed, salt);\n }\n\n}\n" + } + ] + },
\"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n // Multiple threads\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 4, 3, \"1832bd8e5cbeba1cb174a13838095e7e66508e9bf04c40178990adbc8ba9eb6f\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 2, 3, \"f8767fe04059cef67b4427cda99bf8bcdd983959dbd399a5e63ea04523716c23\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 3, 3, \"bcad257eff3d1090b50276514857e60db5d0ec484129013ef3c88f7d36e438d6\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 1, 3, \"498344ee9d31baf82cc93ebb3874fe0b76e164302c1cefa1b63a90a69afb9b4d\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 4, 3, \"8a665611e40710ba1fd78c181549c750f17c12e423c11930ce997f04c7153e0c\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 1, 3, \"d9e33c683451b21fb3720afbd78bf12518c1d4401fa39f054b052a145c968bb1\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 16, 3, \"a67b383bb88a282aef595d98697f90820adf64582a4b3627c76b7da3d8bae915\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 1, 3, \"97a11df9382a788c781929831d409d3599e0b67ab452ef834718114efdcd1c6d\"},\n\n };\n\n\n @Test\n public void test()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = new BalloonHashingFunction((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n }\n\n }\n\n @Test\n public void testInstance()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = BalloonHashingFunction.getInstance((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash(((String) testVector[0]).getBytes(), ((String) testVector[1]).getBytes()).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n Assert.assertTrue(balloonHashingFunction.check(((String) testVector[0]).getBytes(), ((String) testVector[7]).getBytes(), ((String) testVector[1]).getBytes()));\n }\n\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String m = \"SHA-256\";\n int i = 2;\n int p = 3;\n int l = 4;\n int v = 5;\n BalloonHashingFunction balloonHashingFunction = BalloonHashingFunction.getInstance(m, i, p, l, v);\n\n // THEN\n boolean eqNull = balloonHashingFunction.equals(null);\n boolean eqClass = balloonHashingFunction.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = balloonHashingFunction.equals(BalloonHashingFunction.getInstance(m, i, p, l, v));\n boolean sameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v));\n String toString = balloonHashingFunction.toString();\n int hashCode = balloonHashingFunction.hashCode();\n boolean notSameInst1 = balloonHashingFunction.equals(new BalloonHashingFunction(\"SHA-512\", i, p, l, v));\n boolean notSameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i+1, p, l, 
v));\n boolean notSameInst3 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p+1, l, v));\n boolean notSameInst4 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l+1, v));\n boolean notSameInst6 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v+1));\n\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new BalloonHashingFunction(m, i+1, p, l, v).toString());\n Assert.assertNotEquals(hashCode, new BalloonHashingFunction(m, i, p, l, v+1).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n Assert.assertFalse(notSameInst3);\n Assert.assertFalse(notSameInst4);\n Assert.assertFalse(notSameInst6);\n }\n\n}\n", + "chunks": [ + { + "chunk_id": "doc_43_chunk_0", + "original_index": 0, + "content": "/*\n * (C) Copyright 2023 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\nimport com.password4j.types.Argon2;\nimport com.password4j.types.Bcrypt;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\n\npublic class BalloonHashingFunctionTest\n{\n\n" + }, + { + "chunk_id": "doc_43_chunk_1", + "original_index": 1, + "content": " private static final Object[][] TEST_VECTORS = new Object[][]{\n // Single thread\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 0, 3, \"716043dff777b44aa7b88dcbab12c078abecfac9d289c5b5195967aa63440dfb\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 0, 3, \"5f02f8206f9cd212485c6bdf85527b698956701ad0852106f94b94ee94577378\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 0, 3, \"20aa99d7fe3f4df4bd98c655c5480ec98b143107a331fd491deda885c4d6a6cc\"},\n new Object[]{\"\\0\", \"\\0\", \"SHA-256\", 3, 3, 0, 3, \"4fc7e302ffa29ae0eac31166cee7a552d1d71135f4e0da66486fb68a749b73a4\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 0, 3, \"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n" + }, + { + "chunk_id": "doc_43_chunk_2", + "original_index": 2, + "content": " // Multiple threads\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 4, 3, \"1832bd8e5cbeba1cb174a13838095e7e66508e9bf04c40178990adbc8ba9eb6f\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 2, 3, \"f8767fe04059cef67b4427cda99bf8bcdd983959dbd399a5e63ea04523716c23\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 3, 3, \"bcad257eff3d1090b50276514857e60db5d0ec484129013ef3c88f7d36e438d6\"},\n" + }, + { + "chunk_id": "doc_43_chunk_3", + "original_index": 3, + "content": " new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 1, 3, \"498344ee9d31baf82cc93ebb3874fe0b76e164302c1cefa1b63a90a69afb9b4d\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 4, 3, \"8a665611e40710ba1fd78c181549c750f17c12e423c11930ce997f04c7153e0c\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 
3, 3, 1, 3, \"d9e33c683451b21fb3720afbd78bf12518c1d4401fa39f054b052a145c968bb1\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 16, 3, \"a67b383bb88a282aef595d98697f90820adf64582a4b3627c76b7da3d8bae915\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 1, 3, \"97a11df9382a788c781929831d409d3599e0b67ab452ef834718114efdcd1c6d\"},\n\n" + }, + { + "chunk_id": "doc_43_chunk_4", + "original_index": 4, + "content": " };\n\n\n @Test\n public void test()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = new BalloonHashingFunction((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n }\n\n }\n\n @Test\n public void testInstance()\n {\n\n" + }, + { + "chunk_id": "doc_43_chunk_5", + "original_index": 5, + "content": " BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = BalloonHashingFunction.getInstance((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash(((String) testVector[0]).getBytes(), ((String) testVector[1]).getBytes()).getResult());\n\n" + }, + { + "chunk_id": "doc_43_chunk_6", + "original_index": 6, + "content": " Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n Assert.assertTrue(balloonHashingFunction.check(((String) testVector[0]).getBytes(), ((String) testVector[7]).getBytes(), ((String) testVector[1]).getBytes()));\n }\n\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String m = \"SHA-256\";\n int i = 2;\n int p = 3;\n int l = 4;\n int v = 5;\n BalloonHashingFunction balloonHashingFunction = BalloonHashingFunction.getInstance(m, i, p, l, v);\n\n" + }, + { + "chunk_id": "doc_43_chunk_7", + "original_index": 7, + "content": " // THEN\n boolean eqNull = balloonHashingFunction.equals(null);\n boolean eqClass = balloonHashingFunction.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = balloonHashingFunction.equals(BalloonHashingFunction.getInstance(m, i, p, l, v));\n boolean sameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v));\n String toString = balloonHashingFunction.toString();\n int hashCode = balloonHashingFunction.hashCode();\n boolean notSameInst1 = balloonHashingFunction.equals(new BalloonHashingFunction(\"SHA-512\", i, p, l, v));\n" + }, + { + "chunk_id": "doc_43_chunk_8", + "original_index": 8, + "content": " boolean notSameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i+1, p, l, v));\n boolean notSameInst3 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p+1, l, v));\n boolean notSameInst4 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l+1, v));\n boolean notSameInst6 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v+1));\n\n" + }, + { + "chunk_id": "doc_43_chunk_9", + "original_index": 9, + "content": " // END\n 
Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new BalloonHashingFunction(m, i+1, p, l, v).toString());\n Assert.assertNotEquals(hashCode, new BalloonHashingFunction(m, i, p, l, v+1).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n Assert.assertFalse(notSameInst3);\n Assert.assertFalse(notSameInst4);\n Assert.assertFalse(notSameInst6);\n }\n\n}\n" + } + ] + }, + { + "doc_id": "doc_44", + "original_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\n\n\nclass Blake2b\n{\n private static final long[] IV = { 0x6a09e667f3bcc908L, 0xbb67ae8584caa73bL, 0x3c6ef372fe94f82bL, 0xa54ff53a5f1d36f1L,\n 0x510e527fade682d1L, 0x9b05688c2b3e6c1fL, 0x1f83d9abfb41bd6bL, 0x5be0cd19137e2179L };\n\n private static final byte[][] SIGMA = { { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 }, { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },\n { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 }, { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },\n { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 }, { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },\n { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 }, { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },\n { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 } };\n\n private static final int ROUNDS = 12;\n\n private static final int BLOCK_LENGTH_BYTES = 128;\n\n private final int digestLength;\n\n private final int keyLength;\n\n private final byte[] buffer;\n\n private final long[] internalState = new long[16];\n\n private int bufferPos = 0;\n\n private long[] chainValue = null;\n\n private long t0 = 0L;\n\n private long t1 = 0L;\n\n private long f0 = 0L;\n\n /**\n * Basic sized constructor - size in bytes.\n *\n * @param digestSize size of the digest in bytes\n */\n Blake2b(int digestSize)\n {\n if (digestSize < 1 || digestSize > 64)\n {\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n }\n\n buffer = new byte[BLOCK_LENGTH_BYTES];\n keyLength = 0;\n this.digestLength = digestSize;\n init();\n }\n\n // initialize chainValue\n private void init()\n {\n chainValue = new long[8];\n chainValue[0] = IV[0] ^ (digestLength | ((long) keyLength << 8) | 0x1010000);\n chainValue[1] = IV[1];\n chainValue[2] = IV[2];\n chainValue[3] = IV[3];\n chainValue[4] = IV[4];\n chainValue[5] = IV[5];\n chainValue[6] = IV[6];\n chainValue[7] = IV[7];\n }\n\n private void initializeInternalState()\n {\n System.arraycopy(chainValue, 0, internalState, 
+ { + "doc_id": "doc_44", + "original_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\n\n\nclass Blake2b\n{\n private static final long[] IV = { 0x6a09e667f3bcc908L, 0xbb67ae8584caa73bL, 0x3c6ef372fe94f82bL, 0xa54ff53a5f1d36f1L,\n 0x510e527fade682d1L, 0x9b05688c2b3e6c1fL, 0x1f83d9abfb41bd6bL, 0x5be0cd19137e2179L };\n\n private static final byte[][] SIGMA = { { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 }, { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },\n { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 }, { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },\n { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 }, { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },\n { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 }, { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },\n { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 } };\n\n private static final int ROUNDS = 12;\n\n private static final int BLOCK_LENGTH_BYTES = 128;\n\n private final int digestLength;\n\n private final int keyLength;\n\n private final byte[] buffer;\n\n private final long[] internalState = new long[16];\n\n private int bufferPos = 0;\n\n private long[] chainValue = null;\n\n private long t0 = 0L;\n\n private long t1 = 0L;\n\n private long f0 = 0L;\n\n /**\n * Basic sized constructor - size in bytes.\n *\n * @param digestSize size of the digest in bytes\n */\n Blake2b(int digestSize)\n {\n if (digestSize < 1 || digestSize > 64)\n {\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n }\n\n buffer = new byte[BLOCK_LENGTH_BYTES];\n keyLength = 0;\n this.digestLength = digestSize;\n init();\n }\n\n // initialize chainValue\n private void init()\n {\n chainValue = new long[8];\n chainValue[0] = IV[0] ^ (digestLength | ((long) keyLength << 8) | 0x1010000);\n chainValue[1] = IV[1];\n chainValue[2] = IV[2];\n chainValue[3] = IV[3];\n chainValue[4] = IV[4];\n chainValue[5] = IV[5];\n chainValue[6] = IV[6];\n chainValue[7] = IV[7];\n }\n\n private void initializeInternalState()\n {\n System.arraycopy(chainValue, 0, internalState, 0, chainValue.length);\n System.arraycopy(IV, 0, internalState, chainValue.length, 4);\n internalState[12] = t0 ^ IV[4];\n internalState[13] = t1 ^ IV[5];\n internalState[14] = f0 ^ IV[6];\n internalState[15] = IV[7];// ^ f1 with f1 = 0\n }\n\n void update(byte[] message)\n {\n if (message == null)\n {\n return;\n }\n update(message, 0, message.length);\n }\n\n /**\n * update the message digest with a block of bytes.\n *\n * @param message the byte array containing the data.\n * @param offset the offset into the byte array where the data starts.\n * @param len the length of the data.\n */\n void update(byte[] message, int offset, int len)\n {\n int remainingLength = 0;\n\n if (bufferPos != 0)\n {\n remainingLength = BLOCK_LENGTH_BYTES - bufferPos;\n if (remainingLength < len)\n {\n System.arraycopy(message, offset, buffer, bufferPos, remainingLength);\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n bufferPos = 0;\n Arrays.fill(buffer, (byte) 0);// clear buffer\n }\n else\n {\n System.arraycopy(message, offset, buffer, bufferPos, len);\n bufferPos += len;\n return;\n }\n }\n\n int messagePos;\n int blockWiseLastPos = offset + len - BLOCK_LENGTH_BYTES;\n for (messagePos = offset + remainingLength; messagePos < blockWiseLastPos; messagePos += BLOCK_LENGTH_BYTES)\n {\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(message, messagePos);\n }\n\n // fill the buffer with left bytes, this might be a full block\n System.arraycopy(message, messagePos, buffer, 0, offset + len - messagePos);\n bufferPos += offset + len - messagePos;\n }\n\n /**\n * close the digest, producing the final digest value. The doFinal\n * call leaves the digest reset.\n * Key, salt and personal string remain.\n *\n * @param out the array the digest is to be copied into.\n * @param outOffset the offset into the out array the digest is to start at.\n */\n void doFinal(byte[] out, int outOffset)\n {\n\n f0 = 0xFFFFFFFFFFFFFFFFL;\n t0 += bufferPos;\n if (bufferPos > 0 && t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n Arrays.fill(buffer, (byte) 0);// Holds eventually the key if input is null\n Arrays.fill(internalState, 0L);\n\n for (int i = 0; i < chainValue.length && (i * 8 < digestLength); i++)\n {\n byte[] bytes = Utils.longToLittleEndian(chainValue[i]);\n\n if (i * 8 < digestLength - 8)\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, 8);\n }\n else\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, digestLength - (i * 8));\n }\n }\n\n Arrays.fill(chainValue, 0L);\n\n reset();\n }\n\n /**\n * Reset the digest back to it's initial state.\n * The key, the salt and the personal string will\n * remain for further computations.\n */\n void reset()\n {\n bufferPos = 0;\n f0 = 0L;\n t0 = 0L;\n t1 = 0L;\n chainValue = null;\n Arrays.fill(buffer, (byte) 0);\n init();\n }\n\n private void compress(byte[] message, int messagePos)\n {\n\n initializeInternalState();\n\n long[] m = new long[16];\n for (int j = 0; j < 16; j++)\n {\n m[j] = Utils.littleEndianToLong(message, messagePos + j * 8);\n }\n\n for (int round = 0; round < ROUNDS; round++)\n {\n\n // G apply to columns of internalState:m[blake2b_sigma[round][2 *\n // blockPos]] /+1\n functionG(m[SIGMA[round][0]], m[SIGMA[round][1]], 0, 4, 8, 12);\n functionG(m[SIGMA[round][2]], m[SIGMA[round][3]], 1, 5, 9, 13);\n functionG(m[SIGMA[round][4]], m[SIGMA[round][5]], 2, 6, 10, 14);\n functionG(m[SIGMA[round][6]], m[SIGMA[round][7]], 3, 7, 11, 15);\n // G apply to diagonals of internalState:\n functionG(m[SIGMA[round][8]], m[SIGMA[round][9]], 0, 5, 10, 15);\n functionG(m[SIGMA[round][10]], m[SIGMA[round][11]], 1, 6, 11, 12);\n functionG(m[SIGMA[round][12]], m[SIGMA[round][13]], 2, 7, 8, 13);\n functionG(m[SIGMA[round][14]], m[SIGMA[round][15]], 3, 4, 9, 14);\n }\n\n // update chain values:\n for (int offset = 0; offset < chainValue.length; offset++)\n {\n chainValue[offset] = chainValue[offset] ^ internalState[offset] ^ internalState[offset + 8];\n }\n }\n\n private void functionG(long m1, long m2, int posA, int posB, int posC, int posD)\n {\n\n internalState[posA] = internalState[posA] + internalState[posB] + m1;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 32);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 24); // replaces 25 of BLAKE\n internalState[posA] = internalState[posA] + internalState[posB] + m2;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 16);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 63); // replaces 11 of BLAKE\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_44_chunk_0", + "original_index": 0, + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\n\n\nclass Blake2b\n{\n private static final long[] IV = { 0x6a09e667f3bcc908L, 0xbb67ae8584caa73bL, 0x3c6ef372fe94f82bL, 0xa54ff53a5f1d36f1L,\n 0x510e527fade682d1L, 0x9b05688c2b3e6c1fL, 0x1f83d9abfb41bd6bL, 0x5be0cd19137e2179L };\n\n" + }, + { + "chunk_id": "doc_44_chunk_1", + "original_index": 1, + "content": " private static final byte[][] SIGMA = { { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 }, { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },\n { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 }, { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },\n { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 }, { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },\n { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 }, { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },\n { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 } };\n\n" + }, + { + "chunk_id": "doc_44_chunk_2", + "original_index": 2, + "content": " private static final int ROUNDS = 12;\n\n private static final int BLOCK_LENGTH_BYTES = 128;\n\n private final int digestLength;\n\n private final int keyLength;\n\n private final byte[] buffer;\n\n private final long[] internalState = new long[16];\n\n private int bufferPos = 0;\n\n private long[] chainValue = null;\n\n private long t0 = 0L;\n\n private long t1 = 0L;\n\n private long f0 = 0L;\n\n" + }, + { + "chunk_id": "doc_44_chunk_3", + "original_index": 3, + "content": " /**\n * Basic sized constructor - size in bytes.\n *\n * @param digestSize size of the digest in bytes\n */\n Blake2b(int digestSize)\n {\n if (digestSize < 1 || digestSize > 64)\n {\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n }\n\n buffer = new byte[BLOCK_LENGTH_BYTES];\n keyLength = 0;\n this.digestLength = digestSize;\n init();\n }\n\n" + }, + { + "chunk_id": "doc_44_chunk_4", + "original_index": 4, + "content": " // initialize chainValue\n private void init()\n {\n chainValue = new long[8];\n chainValue[0] = IV[0] ^ (digestLength | ((long) keyLength << 8) | 0x1010000);\n chainValue[1] = IV[1];\n chainValue[2] = IV[2];\n chainValue[3] = IV[3];\n chainValue[4] = IV[4];\n chainValue[5] = IV[5];\n chainValue[6] = IV[6];\n chainValue[7] = IV[7];\n }\n\n" + }, + { + "chunk_id": "doc_44_chunk_5", + "original_index": 5, + "content": " private void initializeInternalState()\n {\n System.arraycopy(chainValue, 0, internalState, 0, chainValue.length);\n System.arraycopy(IV, 0, internalState, chainValue.length, 4);\n internalState[12] = t0 ^ IV[4];\n internalState[13] = t1 ^ IV[5];\n internalState[14] = f0 ^ IV[6];\n internalState[15] = IV[7];// ^ f1 with f1 = 0\n }\n\n" + }, + { + "chunk_id": "doc_44_chunk_6", + "original_index": 6, + "content": " void update(byte[] message)\n {\n if (message == null)\n {\n return;\n }\n update(message, 0, message.length);\n }\n\n /**\n * update the message digest with a block of bytes.\n *\n * @param message the byte array containing the data.\n * @param offset the offset into the byte array where the data starts.\n * @param len the length of the data.\n */\n void update(byte[] message, int offset, int len)\n {\n int remainingLength = 0;\n\n" + }, + { + "chunk_id": "doc_44_chunk_7", + "original_index": 7, + "content": " if (bufferPos != 0)\n {\n remainingLength = BLOCK_LENGTH_BYTES - bufferPos;\n if (remainingLength < len)\n {\n System.arraycopy(message, offset, buffer, bufferPos, remainingLength);\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n bufferPos = 0;\n Arrays.fill(buffer, (byte) 0);// clear buffer\n }\n else\n {\n System.arraycopy(message, offset, buffer, bufferPos, len);\n bufferPos += len;\n return;\n }\n }\n\n" + }, + { + "chunk_id": "doc_44_chunk_8", + "original_index": 8, + "content": " int messagePos;\n int blockWiseLastPos = offset + len - BLOCK_LENGTH_BYTES;\n for (messagePos = offset + remainingLength; messagePos < blockWiseLastPos; messagePos += BLOCK_LENGTH_BYTES)\n {\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(message, messagePos);\n }\n\n" + }, + { + "chunk_id": "doc_44_chunk_9", + "original_index": 9, + "content": " // fill the buffer with left bytes, this might be a full block\n System.arraycopy(message, messagePos, buffer, 0, offset + len - messagePos);\n bufferPos += offset + len - messagePos;\n }\n\n /**\n * close the digest, producing the final digest value. The doFinal\n * call leaves the digest reset.\n * Key, salt and personal string remain.\n *\n * @param out the array the digest is to be copied into.\n * @param outOffset the offset into the out array the digest is to start at.\n */\n void doFinal(byte[] out, int outOffset)\n {\n\n" + }, + { + "chunk_id": "doc_44_chunk_10", + "original_index": 10, + "content": " f0 = 0xFFFFFFFFFFFFFFFFL;\n t0 += bufferPos;\n if (bufferPos > 0 && t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n Arrays.fill(buffer, (byte) 0);// Holds eventually the key if input is null\n Arrays.fill(internalState, 0L);\n\n for (int i = 0; i < chainValue.length && (i * 8 < digestLength); i++)\n {\n byte[] bytes = Utils.longToLittleEndian(chainValue[i]);\n\n" + }, + { + "chunk_id": "doc_44_chunk_11", + "original_index": 11, + "content": " if (i * 8 < digestLength - 8)\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, 8);\n }\n else\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, digestLength - (i * 8));\n }\n }\n\n Arrays.fill(chainValue, 0L);\n\n reset();\n }\n\n" + }, + { + "chunk_id": "doc_44_chunk_12", + "original_index": 12, + "content": " /**\n * Reset the digest back to it's initial state.\n * The key, the salt and the personal string will\n * remain for further computations.\n */\n void reset()\n {\n bufferPos = 0;\n f0 = 0L;\n t0 = 0L;\n t1 = 0L;\n chainValue = null;\n Arrays.fill(buffer, (byte) 0);\n init();\n }\n\n" + }, + { + "chunk_id": "doc_44_chunk_13", + "original_index": 13, + "content": " private void compress(byte[] message, int messagePos)\n {\n\n initializeInternalState();\n\n long[] m = new long[16];\n for (int j = 0; j < 16; j++)\n {\n m[j] = Utils.littleEndianToLong(message, messagePos + j * 8);\n }\n\n for (int round = 0; round < ROUNDS; round++)\n {\n\n" + }, + { + "chunk_id": "doc_44_chunk_14", + "original_index": 14, + "content": " // G apply to columns of internalState:m[blake2b_sigma[round][2 *\n // blockPos]] /+1\n functionG(m[SIGMA[round][0]], m[SIGMA[round][1]], 0, 4, 8, 12);\n functionG(m[SIGMA[round][2]], m[SIGMA[round][3]], 1, 5, 9, 13);\n functionG(m[SIGMA[round][4]], m[SIGMA[round][5]], 2, 6, 10, 14);\n functionG(m[SIGMA[round][6]], m[SIGMA[round][7]], 3, 7, 11, 15);\n // G apply to diagonals of internalState:\n functionG(m[SIGMA[round][8]], m[SIGMA[round][9]], 0, 5, 10, 15);\n functionG(m[SIGMA[round][10]], m[SIGMA[round][11]], 1, 6, 11, 12);\n functionG(m[SIGMA[round][12]], m[SIGMA[round][13]], 2, 7, 8, 13);\n functionG(m[SIGMA[round][14]], m[SIGMA[round][15]], 3, 4, 9, 14);\n }\n\n" + }, + { + "chunk_id": "doc_44_chunk_15", + "original_index": 15, + "content": " // update chain values:\n for (int offset = 0; offset < chainValue.length; offset++)\n {\n chainValue[offset] = chainValue[offset] ^ internalState[offset] ^ internalState[offset + 8];\n }\n }\n\n private void functionG(long m1, long m2, int posA, int posB, int posC, int posD)\n {\n\n" + }, + { + "chunk_id": "doc_44_chunk_16", + "original_index": 16, + "content": " internalState[posA] = internalState[posA] + internalState[posB] + m1;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 32);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 24); // replaces 25 of BLAKE\n internalState[posA] = internalState[posA] + internalState[posB] + m2;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 16);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 63); // replaces 11 of BLAKE\n }\n}\n" + } + ] + },
+ { + "doc_id": "doc_45", + "original_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n * <p>\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_45_chunk_0", + "original_index": 0, + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n" + }, + { + "chunk_id": "doc_45_chunk_1", + "original_index": 1, + "content": "/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n * <p>\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n" + } + ] + },
\n * The same {@link HashingFunction} used to generate the hash\n * is used to verify the plain password; in addition cryptographic\n * seasoning such as salt and pepper are stored in this object.\n *

\n * A hash is the product of a one-way function that maps data of arbitrary size to\n * fixed-size values; it is called hashing function (HF).\n * This class represent hashes generated by cryptographic hash function (CHF),\n * where each function has the following properties:\n *

    \n *
  • it is deterministic, meaning that the same message always results in the same hash
  • \n *
  • it is quick to compute the hash value for any given message
  • \n *
  • it is infeasible to generate a message that yields a given hash value
  • \n *
  • it is infeasible to find two different messages with the same hash value
  • \n *
  • a small change to a message should change the hash value so extensively that the new hash value\n * appears uncorrelated with the old hash value
  • \n *
\n *

\n * A salt is a unique, randomly generated string that is added to each password as part of the hashing process.\n * As the salt is unique for every user, an attacker has to crack hashes one at a time using the respective salt,\n * rather than being able to calculate a hash once and compare it against every stored hash.\n *

\n * A pepper can be used in additional to salting to provide an additional layer of protection.\n * It is similar to a salt, but has two key differences:\n *

    \n *
  • The pepper is shared between all stored passwords, rather than being unique like a salt.
  • \n *
  • The pepper is not stored in the database, unlike the salts.
  • \n *
\n *\n * @author David Bertoldi\n * @see
OWASP Password Storage Cheat Sheet\n * @see Key derivation function\n * @see Cryptographic hash function\n * @since 0.1.0\n */\npublic class Hash\n{\n\n /**\n * Represents the full output of a cryptographic hashing function.\n * Depending on the implementation of the CHF, it may contain\n * the salt and the configuration of the CHF itself.\n */\n private byte[] result;\n\n /**\n * Represents the computed output of a cryptographic hashing function.\n * It never contains salt and other configurations.\n */\n private byte[] bytes;\n\n /**\n * Represents the salt: random data that is used as an additional input\n * to a cryptographic hashing function.\n */\n private byte[] salt;\n\n /**\n * Represents the pepper: a secret added to the input password\n * prior to being hashed with a cryptographic hash function\n */\n private CharSequence pepper;\n\n /**\n * Represents the hashing function used to generate this object.\n *\n * @see HashingFunction for more details\n */\n private HashingFunction hashingFunction;\n\n /**\n * It is meant to not be used if not internally.\n * The other constructor must be used instead.\n *\n * @see Hash#Hash(HashingFunction, String, byte[], String)\n * @since 0.1.0\n */\n @SuppressWarnings(\"unused\")\n private Hash()\n {\n //\n }\n\n /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *

\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n * @deprecated As of 1.8.1 because of the salt conversion from {@link String} to byte[].\n * {@link Hash#Hash(HashingFunction, String, byte[], byte[])} should be used instead.\n */\n @Deprecated\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, String salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, Utils.fromCharSequenceToBytes(salt));\n }\n\n\n\n /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *

\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n */\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, byte[] salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, salt);\n }\n\n /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *

\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash as bytes array.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation as bytes array.\n * @since 1.7.0\n */\n public Hash(HashingFunction hashingFunction, byte[] result, byte[] bytes, byte[] salt)\n {\n this.hashingFunction = hashingFunction;\n this.salt = salt;\n this.result = result;\n this.bytes = bytes;\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public String getResult()\n {\n return Utils.fromBytesToString(result);\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public byte[] getResultAsBytes()\n {\n return result;\n }\n\n /**\n * Retrieves the hash as byte array and without the parameters\n * encoded in the final hash.\n *\n * @return the hash.\n * @since 1.5.1\n */\n public byte[] getBytes()\n {\n return bytes;\n }\n\n /**\n * Retrieves the {@link HashingFunction} used\n * to hash the password.\n *\n * @return the CHF\n * @since 0.4.0\n */\n public HashingFunction getHashingFunction()\n {\n return hashingFunction;\n }\n\n /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as {@link String}.\n * @since 0.1.0\n */\n public String getSalt()\n {\n return Utils.fromBytesToString(salt);\n }\n\n /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as bytes array.\n * @since 1.7.0\n */\n public byte[] getSaltBytes()\n {\n return salt;\n }\n\n /**\n * Retrieves the pepper used with the password in the hashing function.\n *\n * @return the pepper.\n * @since 0.1.0\n */\n public CharSequence getPepper()\n {\n return pepper;\n }\n\n /**\n * Stores the pepper used together with the password in the hashing function.\n *
<p>
\n * This methods should be used just after the creation of this object.\n *\n * @param pepper the pepper used.\n * @since 0.1.0\n */\n void setPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n }\n\n /**\n * Produces a human-readable description of the {@link Hash}.\n *\n * @return a readable version of this object\n * @since 0.1.0\n */\n @Override\n public String toString()\n {\n StringBuilder sb = new StringBuilder();\n if (this.hashingFunction != null)\n {\n sb.append(hashingFunction.getClass().getSimpleName());\n }\n sb.append(\"[salt=\").append(getSalt()).append(\", pepper=\").append(getPepper()).append(\", hash=\").append(getResult())\n .append(\"]\");\n return sb.toString();\n }\n\n /**\n * Two {@link Hash}es are considered equals if they contain\n * the same hash, salt, pepper and they are generated with\n * the same {@link HashingFunction}\n *\n * @param obj the object to compare\n * @return true if equals\n * @since 0.1.0\n */\n @Override\n public boolean equals(Object obj)\n {\n if (obj == null || !this.getClass().equals(obj.getClass()))\n {\n return false;\n }\n\n Hash otherHash = (Hash) obj;\n return hasSameValues(otherHash);\n }\n\n private boolean hasSameValues(Hash otherHash)\n {\n return Arrays.equals(this.result, otherHash.result) //\n && Arrays.equals(this.bytes, otherHash.bytes) //\n && Arrays.equals(this.salt, otherHash.salt) //\n && areEquals(this.pepper, otherHash.pepper) //\n && this.hashingFunction.equals(otherHash.hashingFunction);\n }\n\n private static boolean areEquals(CharSequence cs1, CharSequence cs2)\n {\n if (cs1 == cs2)\n {\n return true;\n }\n else if (cs1 != null && cs2 != null)\n {\n return cs1.equals(cs2);\n }\n return false;\n }\n\n @Override\n public int hashCode()\n {\n return Objects.hash(Arrays.hashCode(result), Arrays.hashCode(salt), pepper, hashingFunction);\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_46_chunk_0", + "original_index": 0, + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\nimport java.util.Objects;\n\n" + }, + { + "chunk_id": "doc_46_chunk_1", + "original_index": 1, + "content": "\n/**\n * This class contains all the information computed after\n * calculating a cryptographic hash.\n *
<p>
\n * The same {@link HashingFunction} used to generate the hash\n * is used to verify the plain password; in addition cryptographic\n * seasoning such as salt and pepper are stored in this object.\n *
<p>
\n * A hash is the product of a one-way function that maps data of arbitrary size to\n * fixed-size values; it is called hashing function (HF).\n * This class represent hashes generated by cryptographic hash function (CHF),\n * where each function has the following properties:\n *
<ul>\n *     <li>it is deterministic, meaning that the same message always results in the same hash</li>\n *     <li>it is quick to compute the hash value for any given message</li>\n *     <li>it is infeasible to generate a message that yields a given hash value</li>\n *     <li>it is infeasible to find two different messages with the same hash value</li>\n" + }, + { + "chunk_id": "doc_46_chunk_2", + "original_index": 2, + "content": " *     <li>a small change to a message should change the hash value so extensively that the new hash value\n * appears uncorrelated with the old hash value</li>\n * </ul>\n * <p>
\n * A salt is a unique, randomly generated string that is added to each password as part of the hashing process.\n * As the salt is unique for every user, an attacker has to crack hashes one at a time using the respective salt,\n * rather than being able to calculate a hash once and compare it against every stored hash.\n *
<p>
\n" + }, + { + "chunk_id": "doc_46_chunk_3", + "original_index": 3, + "content": " * A pepper can be used in additional to salting to provide an additional layer of protection.\n * It is similar to a salt, but has two key differences:\n *
<ul>\n *     <li>The pepper is shared between all stored passwords, rather than being unique like a salt.</li>\n *     <li>The pepper is not stored in the database, unlike the salts.</li>\n * </ul>
\n *\n * @author David Bertoldi\n * @see OWASP Password Storage Cheat Sheet\n * @see Key derivation function\n * @see Cryptographic hash function\n * @since 0.1.0\n */\npublic class Hash\n{\n\n" + }, + { + "chunk_id": "doc_46_chunk_4", + "original_index": 4, + "content": " /**\n * Represents the full output of a cryptographic hashing function.\n * Depending on the implementation of the CHF, it may contain\n * the salt and the configuration of the CHF itself.\n */\n private byte[] result;\n\n /**\n * Represents the computed output of a cryptographic hashing function.\n * It never contains salt and other configurations.\n */\n private byte[] bytes;\n\n /**\n * Represents the salt: random data that is used as an additional input\n * to a cryptographic hashing function.\n */\n private byte[] salt;\n\n" + }, + { + "chunk_id": "doc_46_chunk_5", + "original_index": 5, + "content": " /**\n * Represents the pepper: a secret added to the input password\n * prior to being hashed with a cryptographic hash function\n */\n private CharSequence pepper;\n\n /**\n * Represents the hashing function used to generate this object.\n *\n * @see HashingFunction for more details\n */\n private HashingFunction hashingFunction;\n\n /**\n * It is meant to not be used if not internally.\n * The other constructor must be used instead.\n *\n * @see Hash#Hash(HashingFunction, String, byte[], String)\n * @since 0.1.0\n */\n @SuppressWarnings(\"unused\")\n private Hash()\n {\n //\n }\n\n" + }, + { + "chunk_id": "doc_46_chunk_6", + "original_index": 6, + "content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n" + }, + { + "chunk_id": "doc_46_chunk_7", + "original_index": 7, + "content": " * @deprecated As of 1.8.1 because of the salt conversion from {@link String} to byte[].\n * {@link Hash#Hash(HashingFunction, String, byte[], byte[])} should be used instead.\n */\n @Deprecated\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, String salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, Utils.fromCharSequenceToBytes(salt));\n }\n\n\n\n" + }, + { + "chunk_id": "doc_46_chunk_8", + "original_index": 8, + "content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n */\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, byte[] salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, salt);\n }\n\n" + }, + { + "chunk_id": "doc_46_chunk_9", + "original_index": 9, + "content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash as bytes array.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n" + }, + { + "chunk_id": "doc_46_chunk_10", + "original_index": 10, + "content": " * @param salt the salt used for the computation as bytes array.\n * @since 1.7.0\n */\n public Hash(HashingFunction hashingFunction, byte[] result, byte[] bytes, byte[] salt)\n {\n this.hashingFunction = hashingFunction;\n this.salt = salt;\n this.result = result;\n this.bytes = bytes;\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public String getResult()\n {\n return Utils.fromBytesToString(result);\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public byte[] getResultAsBytes()\n {\n return result;\n }\n\n /**\n * Retrieves the hash as byte array and without the parameters\n * encoded in the final hash.\n *\n * @return the hash.\n * @since 1.5.1\n */\n public byte[] getBytes()\n {\n return bytes;\n }\n\n" + }, + { + "chunk_id": "doc_46_chunk_11", + "original_index": 11, + "content": " /**\n * Retrieves the {@link HashingFunction} used\n * to hash the password.\n *\n * @return the CHF\n * @since 0.4.0\n */\n public HashingFunction getHashingFunction()\n {\n return hashingFunction;\n }\n\n /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as {@link String}.\n * @since 0.1.0\n */\n public String getSalt()\n {\n return Utils.fromBytesToString(salt);\n }\n\n" + }, + { + "chunk_id": "doc_46_chunk_12", + "original_index": 12, + "content": " /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as bytes array.\n * @since 1.7.0\n */\n public byte[] getSaltBytes()\n {\n return salt;\n }\n\n /**\n * Retrieves the pepper used with the password in the hashing function.\n *\n * @return the pepper.\n * @since 0.1.0\n */\n public CharSequence getPepper()\n {\n return pepper;\n }\n\n /**\n * Stores the pepper used together with the password in the hashing function.\n *
<p>
\n * This methods should be used just after the creation of this object.\n *\n * @param pepper the pepper used.\n * @since 0.1.0\n */\n void setPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n }\n\n" + }, + { + "chunk_id": "doc_46_chunk_13", + "original_index": 13, + "content": " /**\n * Produces a human-readable description of the {@link Hash}.\n *\n * @return a readable version of this object\n * @since 0.1.0\n */\n @Override\n public String toString()\n {\n StringBuilder sb = new StringBuilder();\n if (this.hashingFunction != null)\n {\n sb.append(hashingFunction.getClass().getSimpleName());\n }\n sb.append(\"[salt=\").append(getSalt()).append(\", pepper=\").append(getPepper()).append(\", hash=\").append(getResult())\n .append(\"]\");\n return sb.toString();\n }\n\n" + }, + { + "chunk_id": "doc_46_chunk_14", + "original_index": 14, + "content": " /**\n * Two {@link Hash}es are considered equals if they contain\n * the same hash, salt, pepper and they are generated with\n * the same {@link HashingFunction}\n *\n * @param obj the object to compare\n * @return true if equals\n * @since 0.1.0\n */\n @Override\n public boolean equals(Object obj)\n {\n if (obj == null || !this.getClass().equals(obj.getClass()))\n {\n return false;\n }\n\n" + }, + { + "chunk_id": "doc_46_chunk_15", + "original_index": 15, + "content": " Hash otherHash = (Hash) obj;\n return hasSameValues(otherHash);\n }\n\n private boolean hasSameValues(Hash otherHash)\n {\n return Arrays.equals(this.result, otherHash.result) //\n && Arrays.equals(this.bytes, otherHash.bytes) //\n && Arrays.equals(this.salt, otherHash.salt) //\n && areEquals(this.pepper, otherHash.pepper) //\n && this.hashingFunction.equals(otherHash.hashingFunction);\n }\n\n private static boolean areEquals(CharSequence cs1, CharSequence cs2)\n {\n if (cs1 == cs2)\n {\n return true;\n }\n else if (cs1 != null && cs2 != null)\n {\n return cs1.equals(cs2);\n }\n return false;\n }\n\n @Override\n public int hashCode()\n {\n return Objects.hash(Arrays.hashCode(result), Arrays.hashCode(salt), pepper, hashingFunction);\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_47", + "original_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n/**\n * Builder class that helps to create a chain of parameters to be used\n * in the hashing process.\n *\n * @author David Bertoldi\n * @since 1.0.0\n */\npublic class HashBuilder\n{\n private byte[] plainTextPassword;\n\n protected byte[] salt;\n\n protected CharSequence pepper;\n\n @SuppressWarnings(\"unused\")\n private HashBuilder()\n {\n //\n }\n\n /**\n * @param plainTextPassword the plain text password\n * @since 1.0.0\n */\n protected HashBuilder(CharSequence plainTextPassword)\n {\n this.plainTextPassword = Utils.fromCharSequenceToBytes(plainTextPassword);\n }\n\n /**\n * @param plainTextPasswordAsBytes the plain text 
password as bytes array\n * @since 1.7.0\n */\n protected HashBuilder(byte[] plainTextPasswordAsBytes)\n {\n this.plainTextPassword = plainTextPasswordAsBytes;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param salt cryptographic salt\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addSalt(String salt)\n {\n this.salt = Utils.fromCharSequenceToBytes(salt);\n return this;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param saltAsBytes cryptographic salt as bytes array\n * @return this builder\n * @since 1.7.0\n */\n public HashBuilder addSalt(byte[] saltAsBytes)\n {\n this.salt = saltAsBytes;\n return this;\n }\n\n /**\n * Add a random cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *
<p>
\n * Calling this method can be omitted for all the CHFs that require a salt.\n *\n * @return this builder\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt()\n {\n this.salt = SaltGenerator.generate();\n return this;\n }\n\n /**\n * Add a random cryptographic salt in the hashing process with a given length.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param length the length of the salt produced\n * @return this builder\n * @throws BadParametersException if the length is non-positive\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt(int length)\n {\n if (length <= 0)\n {\n throw new BadParametersException(\"Salt cannot have a non-positive length\");\n }\n else\n {\n this.salt = SaltGenerator.generate(length);\n }\n return this;\n }\n\n /**\n * Concatenates the pepper configured in your `psw4j.properties` file with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @return this builder\n * @see PepperGenerator#get()\n */\n public HashBuilder addPepper()\n {\n this.pepper = PepperGenerator.get();\n return this;\n }\n\n /**\n * Concatenates the provided string with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @param pepper cryptographic pepper\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n return this;\n }\n\n /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *
<p>
\n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link PBKDF2Function}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withPBKDF2()\n {\n return with(AlgorithmFinder.getPBKDF2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link CompressedPBKDF2Function}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getCompressedPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withCompressedPBKDF2()\n {\n return with(AlgorithmFinder.getCompressedPBKDF2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link BcryptFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getBcryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withBcrypt()\n {\n return with(AlgorithmFinder.getBcryptInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link ScryptFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getScryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withScrypt()\n {\n return with(AlgorithmFinder.getScryptInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link MessageDigestFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.4.0\n */\n public Hash withMessageDigest()\n {\n return with(AlgorithmFinder.getMessageDigestInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link Argon2Function}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.5.0\n */\n public Hash withArgon2()\n {\n return with(AlgorithmFinder.getArgon2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link BalloonHashingFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.8.0\n */\n public Hash withBalloonHashing()\n {\n return with(AlgorithmFinder.getBalloonHashingInstance());\n }\n\n}\n", + "chunks": [ + { + "chunk_id": "doc_47_chunk_0", + "original_index": 0, + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n" + }, + { + "chunk_id": "doc_47_chunk_1", + "original_index": 1, + "content": "/**\n * Builder class that helps to create a chain of parameters to be used\n * in the hashing process.\n *\n * @author David Bertoldi\n * @since 1.0.0\n */\npublic class HashBuilder\n{\n private byte[] plainTextPassword;\n\n protected byte[] salt;\n\n protected CharSequence pepper;\n\n @SuppressWarnings(\"unused\")\n private HashBuilder()\n {\n //\n }\n\n /**\n * @param plainTextPassword the plain text password\n * @since 1.0.0\n */\n protected HashBuilder(CharSequence plainTextPassword)\n {\n this.plainTextPassword = Utils.fromCharSequenceToBytes(plainTextPassword);\n }\n\n /**\n * @param plainTextPasswordAsBytes the plain text password as bytes array\n * @since 1.7.0\n */\n protected HashBuilder(byte[] plainTextPasswordAsBytes)\n {\n this.plainTextPassword = plainTextPasswordAsBytes;\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_2", + "original_index": 2, + "content": " /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param salt cryptographic salt\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addSalt(String salt)\n {\n this.salt = Utils.fromCharSequenceToBytes(salt);\n return this;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param saltAsBytes cryptographic salt as bytes array\n * @return this builder\n * @since 1.7.0\n */\n public HashBuilder addSalt(byte[] saltAsBytes)\n {\n this.salt = saltAsBytes;\n return this;\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_3", + "original_index": 3, + "content": " /**\n * Add a random cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *
<p>
\n * Calling this method can be omitted for all the CHFs that require a salt.\n *\n * @return this builder\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt()\n {\n this.salt = SaltGenerator.generate();\n return this;\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_4", + "original_index": 4, + "content": " /**\n * Add a random cryptographic salt in the hashing process with a given length.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param length the length of the salt produced\n * @return this builder\n * @throws BadParametersException if the length is non-positive\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt(int length)\n {\n if (length <= 0)\n {\n throw new BadParametersException(\"Salt cannot have a non-positive length\");\n }\n else\n {\n this.salt = SaltGenerator.generate(length);\n }\n return this;\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_5", + "original_index": 5, + "content": " /**\n * Concatenates the pepper configured in your `psw4j.properties` file with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @return this builder\n * @see PepperGenerator#get()\n */\n public HashBuilder addPepper()\n {\n this.pepper = PepperGenerator.get();\n return this;\n }\n\n /**\n * Concatenates the provided string with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @param pepper cryptographic pepper\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n return this;\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_6", + "original_index": 6, + "content": " /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *
<p>
\n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_7", + "original_index": 7, + "content": " /**\n * Hashes the previously given plain text password\n * with {@link PBKDF2Function}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withPBKDF2()\n {\n return with(AlgorithmFinder.getPBKDF2Instance());\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_8", + "original_index": 8, + "content": " /**\n * Hashes the previously given plain text password\n * with {@link CompressedPBKDF2Function}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getCompressedPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withCompressedPBKDF2()\n {\n return with(AlgorithmFinder.getCompressedPBKDF2Instance());\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_9", + "original_index": 9, + "content": " /**\n * Hashes the previously given plain text password\n * with {@link BcryptFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getBcryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withBcrypt()\n {\n return with(AlgorithmFinder.getBcryptInstance());\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_10", + "original_index": 10, + "content": " /**\n * Hashes the previously given plain text password\n * with {@link ScryptFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getScryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withScrypt()\n {\n return with(AlgorithmFinder.getScryptInstance());\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_11", + "original_index": 11, + "content": " /**\n * Hashes the previously given plain text password\n * with {@link MessageDigestFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.4.0\n */\n public Hash withMessageDigest()\n {\n return with(AlgorithmFinder.getMessageDigestInstance());\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_12", + "original_index": 12, + "content": " /**\n * Hashes the previously given plain text password\n * with {@link Argon2Function}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.5.0\n */\n public Hash withArgon2()\n {\n return with(AlgorithmFinder.getArgon2Instance());\n }\n\n" + }, + { + "chunk_id": "doc_47_chunk_13", + "original_index": 13, + "content": " /**\n * Hashes the previously given plain text password\n * with {@link BalloonHashingFunction}.\n *
<p>
\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.8.0\n */\n public Hash withBalloonHashing()\n {\n return with(AlgorithmFinder.getBalloonHashingInstance());\n }\n\n}\n" + } + ] + }, + { + "doc_id": "doc_48", + "original_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n @Test\n public void testMDVariants()\n {\n Set algorithms = AlgorithmFinder.getAllMessageDigests();\n for (String alg : algorithms)\n {\n // GIVEN\n MessageDigestFunction strategy = MessageDigestFunction.getInstance(alg);\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n Hash hashWithSalt = strategy.hash(password, salt);\n\n // THEN\n Assert.assertTrue(strategy.check(password, hash.getResult()));\n Assert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n }\n }\n\n @Test(expected = UnsupportedOperationException.class)\n public void testMDWrongAlgorithm()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"notAnAlgorithm\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n strategy.hash(password, salt);\n\n // THEN\n }\n\n @Test\n public void testMDWrongSaltOption()\n {\n // GIVEN\n\n 
PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"1234\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n }\n\n @Test\n public void testMDRightSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"prepend\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.PREPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n\n }\n\n\n @Test\n public void testPBKDF2Check()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2WrongCheck2()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String badHash = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n try {\n Assert.assertTrue(strategy.check(userSubmittedPassword, badHash));\n } catch (BadParametersException ex) {\n assertEquals(\"`\" + badHash + \"` is not a valid hash\", ex.getMessage());\n }\n }\n\n\n @Test(expected = BadParametersException.class)\n public void testPBKDF2BadCheck()\n {\n // GIVEN\n String hashed = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n\n }\n\n @Test\n public void testAlgorithmFromCode()\n {\n // GIVEN\n\n // WHEN\n Hmac algNull = Hmac.fromCode(-100);\n for (Hmac enumAlg : Hmac.values())\n {\n Hmac alg = Hmac.fromCode(enumAlg.code());\n\n\n // THEN\n Assert.assertNotNull(alg);\n assertEquals(enumAlg.code(), alg.code());\n assertEquals(enumAlg.bits(), alg.bits());\n }\n Assert.assertNull(algNull);\n\n\n }\n\n @Test\n public void testPBKDF2Coherence()\n {\n // GIVEN\n String password = \"password\";\n\n // WHEN\n Hash hash = PBKDF2Function.getInstance(Hmac.SHA256, 8_777, 256).hash(password);\n\n // THEN\n Assert.assertTrue(Password.check(password, hash));\n\n }\n\n @Test\n public void testPBKDF2CheckWithFixedConfigurations()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = new CompressedPBKDF2Function(Hmac.SHA256, 10_000, 256);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2equality()\n {\n // GIVEN\n PBKDF2Function strategy1 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy2 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy3 = PBKDF2Function.getInstance(Hmac.SHA1, 10_000, 256);\n PBKDF2Function strategy4 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 256);\n PBKDF2Function strategy5 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 123);\n\n\n // WHEN\n Map map = new 
HashMap<>();\n map.put(strategy1, strategy1.toString());\n map.put(strategy2, strategy2.toString());\n map.put(strategy3, strategy3.toString());\n map.put(strategy4, strategy4.toString());\n map.put(strategy5, strategy5.toString());\n\n\n // THEN\n assertEquals(4, map.size());\n assertEquals(strategy1, strategy2);\n }\n\n @Test\n public void testCompressed()\n {\n Hmac algorithm = Hmac.SHA512;\n\n\n for (int i = 1; i <= 100; i++)\n {\n String password = PepperGenerator.generate(12);\n String salt = PepperGenerator.generate(i);\n Hash hash = CompressedPBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n Hash notCompressedHash = PBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n // THEN\n boolean eqNull = function.equals(null);\n boolean eqClass = function.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = function.equals(MessageDigestFunction.getInstance(a, o));\n boolean sameInst2 = function.equals(new MessageDigestFunction(a, o));\n String toString = function.toString();\n int hashCode = function.hashCode();\n boolean notSameInst1 = function.equals(new MessageDigestFunction(\"SHA1\", o));\n boolean notSameInst2 = function.equals(new MessageDigestFunction(a, SaltOption.PREPEND));\n\n\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new MessageDigestFunction(\"SHA1\", o).toString());\n Assert.assertNotEquals(hashCode, new MessageDigestFunction(a, SaltOption.PREPEND).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n }\n\n}\n", + "chunks": [ + { + "chunk_id": "doc_48_chunk_0", + "original_index": 0, + "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n" + }, + { + "chunk_id": "doc_48_chunk_1", + "original_index": 1, + "content": "import com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport 
org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n" + }, + { + "chunk_id": "doc_48_chunk_2", + "original_index": 2, + "content": "\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n" + }, + { + "chunk_id": "doc_48_chunk_3", + "original_index": 3, + "content": " @Test\n public void testMDVariants()\n {\n Set algorithms = AlgorithmFinder.getAllMessageDigests();\n for (String alg : algorithms)\n {\n // GIVEN\n MessageDigestFunction strategy = MessageDigestFunction.getInstance(alg);\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n Hash hashWithSalt = strategy.hash(password, salt);\n\n // THEN\n Assert.assertTrue(strategy.check(password, hash.getResult()));\n Assert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n }\n }\n\n @Test(expected = UnsupportedOperationException.class)\n public void testMDWrongAlgorithm()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"notAnAlgorithm\");\n String password = \"password\";\n String salt = \"abc\";\n\n" + }, + { + "chunk_id": "doc_48_chunk_4", + "original_index": 4, + "content": " // WHEN\n strategy.hash(password, salt);\n\n // THEN\n }\n\n @Test\n public void testMDWrongSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"1234\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n }\n\n" + }, + { + "chunk_id": "doc_48_chunk_5", + "original_index": 5, + "content": " @Test\n public void testMDRightSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"prepend\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.PREPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n\n }\n\n\n @Test\n public void testPBKDF2Check()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n" + }, + { + "chunk_id": 
"doc_48_chunk_6", + "original_index": 6, + "content": " // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2WrongCheck2()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String badHash = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n try {\n Assert.assertTrue(strategy.check(userSubmittedPassword, badHash));\n } catch (BadParametersException ex) {\n assertEquals(\"`\" + badHash + \"` is not a valid hash\", ex.getMessage());\n }\n }\n\n" + }, + { + "chunk_id": "doc_48_chunk_7", + "original_index": 7, + "content": "\n @Test(expected = BadParametersException.class)\n public void testPBKDF2BadCheck()\n {\n // GIVEN\n String hashed = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n\n }\n\n @Test\n public void testAlgorithmFromCode()\n {\n // GIVEN\n\n // WHEN\n Hmac algNull = Hmac.fromCode(-100);\n for (Hmac enumAlg : Hmac.values())\n {\n Hmac alg = Hmac.fromCode(enumAlg.code());\n\n" + }, + { + "chunk_id": "doc_48_chunk_8", + "original_index": 8, + "content": "\n // THEN\n Assert.assertNotNull(alg);\n assertEquals(enumAlg.code(), alg.code());\n assertEquals(enumAlg.bits(), alg.bits());\n }\n Assert.assertNull(algNull);\n\n\n }\n\n @Test\n public void testPBKDF2Coherence()\n {\n // GIVEN\n String password = \"password\";\n\n // WHEN\n Hash hash = PBKDF2Function.getInstance(Hmac.SHA256, 8_777, 256).hash(password);\n\n // THEN\n Assert.assertTrue(Password.check(password, hash));\n\n }\n\n" + }, + { + "chunk_id": "doc_48_chunk_9", + "original_index": 9, + "content": " @Test\n public void testPBKDF2CheckWithFixedConfigurations()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = new CompressedPBKDF2Function(Hmac.SHA256, 10_000, 256);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2equality()\n {\n // GIVEN\n PBKDF2Function strategy1 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy2 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy3 = PBKDF2Function.getInstance(Hmac.SHA1, 10_000, 256);\n PBKDF2Function strategy4 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 256);\n PBKDF2Function strategy5 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 123);\n\n" + }, + { + "chunk_id": "doc_48_chunk_10", + "original_index": 10, + "content": "\n // WHEN\n Map map = new HashMap<>();\n map.put(strategy1, strategy1.toString());\n map.put(strategy2, strategy2.toString());\n map.put(strategy3, strategy3.toString());\n map.put(strategy4, strategy4.toString());\n map.put(strategy5, strategy5.toString());\n\n\n // THEN\n assertEquals(4, map.size());\n assertEquals(strategy1, strategy2);\n }\n\n @Test\n public void testCompressed()\n {\n Hmac algorithm = Hmac.SHA512;\n\n\n for (int i = 1; i <= 100; i++)\n {\n String password = PepperGenerator.generate(12);\n String salt = PepperGenerator.generate(i);\n Hash hash = 
CompressedPBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n Hash notCompressedHash = PBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n" + }, + { + "chunk_id": "doc_48_chunk_11", + "original_index": 11, + "content": " String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n" + }, + { + "chunk_id": "doc_48_chunk_12", + "original_index": 12, + "content": " // THEN\n boolean eqNull = function.equals(null);\n boolean eqClass = function.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = function.equals(MessageDigestFunction.getInstance(a, o));\n boolean sameInst2 = function.equals(new MessageDigestFunction(a, o));\n String toString = function.toString();\n int hashCode = function.hashCode();\n boolean notSameInst1 = function.equals(new MessageDigestFunction(\"SHA1\", o));\n boolean notSameInst2 = function.equals(new MessageDigestFunction(a, SaltOption.PREPEND));\n\n" + }, + { + "chunk_id": "doc_48_chunk_13", + "original_index": 13, + "content": "\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new MessageDigestFunction(\"SHA1\", o).toString());\n Assert.assertNotEquals(hashCode, new MessageDigestFunction(a, SaltOption.PREPEND).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n }\n\n}\n" + } + ] + }, + { + "doc_id": "doc_49", + "original_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", + "content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = \"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9\u03abI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int 
parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"\u2019(\u3063\uff3e\u25bf\uff3e)\u06f6\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A\u0669(\u02d8\u25e1\u02d8 ) \u274c\u274c \u274c\u274c\u274c\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"\u0178\u0141\u0100PR\u010c\")\n .addSalt(\"\u0178\u0141\u0100PR\u010cAA\")\n .withArgon2()\n .getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n", + "chunks": [ + { + "chunk_id": "doc_49_chunk_0", + "original_index": 0, + "content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = 
\"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n" + }, + { + "chunk_id": "doc_49_chunk_1", + "original_index": 1, + "content": " boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9\u03abI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n" + }, + { + "chunk_id": "doc_49_chunk_2", + "original_index": 2, + "content": "\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n" + }, + { + "chunk_id": "doc_49_chunk_3", + "original_index": 3, + "content": " (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n" + }, + { + "chunk_id": "doc_49_chunk_4", + "original_index": 4, + "content": " /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n" + }, + { + "chunk_id": "doc_49_chunk_5", + "original_index": 5, + "content": "\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n" + }, + { + "chunk_id": "doc_49_chunk_6", + "original_index": 6, + "content": " // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"\u2019(\u3063\uff3e\u25bf\uff3e)\u06f6\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A\u0669(\u02d8\u25e1\u02d8 ) \u274c\u274c \u274c\u274c\u274c\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n" + }, + { + "chunk_id": "doc_49_chunk_7", + "original_index": 7, + "content": " Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"\u0178\u0141\u0100PR\u010c\")\n .addSalt(\"\u0178\u0141\u0100PR\u010cAA\")\n .withArgon2()\n 
.getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n" + }, + { + "chunk_id": "doc_49_chunk_8", + "original_index": 8, + "content": " private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n" + } + ] + }, + { + "doc_id": "doc_50", + "original_uuid": "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e", + "content": "/*\n * (C) Copyright 2021 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j.types;\n\n/**\n * Enum containing the different variations of Argon2.\n *\n * @author David Bertoldi\n * @see Argon2\n * @since 1.5.0\n */\npublic enum Argon2\n{\n /**\n * It maximizes resistance to GPU cracking attacks.\n * It accesses the memory array in a password dependent order, which reduces the possibility of time\u2013memory trade-off (TMTO) attacks,\n * but introduces possible side-channel attacks\n */\n D,\n\n /**\n * It is optimized to resist side-channel attacks. It accesses the memory array in a password independent order.\n */\n I,\n\n /**\n * It is a hybrid version. 
It follows the Argon2i approach for the first half pass over memory and the Argon2d approach for subsequent passes.\n * It is recommended to use Argon2id except when there are reasons to prefer one of the other two modes.\n */\n ID;\n\n}\n", + "chunks": [ + { + "chunk_id": "doc_50_chunk_0", + "original_index": 0, + "content": "/*\n * (C) Copyright 2021 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j.types;\n\n" + }, + { + "chunk_id": "doc_50_chunk_1", + "original_index": 1, + "content": "/**\n * Enum containing the different variations of Argon2.\n *\n * @author David Bertoldi\n * @see Argon2\n * @since 1.5.0\n */\npublic enum Argon2\n{\n /**\n * It maximizes resistance to GPU cracking attacks.\n * It accesses the memory array in a password dependent order, which reduces the possibility of time\u2013memory trade-off (TMTO) attacks,\n * but introduces possible side-channel attacks\n */\n D,\n\n /**\n * It is optimized to resist side-channel attacks. It accesses the memory array in a password independent order.\n */\n I,\n\n /**\n * It is a hybrid version. It follows the Argon2i approach for the first half pass over memory and the Argon2d approach for subsequent passes.\n * It is recommended to use Argon2id except when there are reasons to prefer one of the other two modes.\n */\n ID;\n\n}\n" + } + ] + }, + { + "doc_id": "doc_51", + "original_uuid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/plugins/wasi_crypto/symmetric/tag.h - Symmetric Tag class ===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the Symmetric Tag definition.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"utils/error.h\"\n#include \"utils/secret_vec.h\"\n\n#include \"common/span.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace Symmetric {\n\n/// Authentication tag, that can be verified without channels using the provided\n/// APIs. 
Very small and no streaming.\n///\n/// More detail:\n/// https://github.com/WebAssembly/wasi-crypto/blob/main/docs/wasi-crypto.md#authentication-tags\nclass Tag {\npublic:\n Tag(Tag &&Data) noexcept = default;\n Tag &operator=(Tag &&Data) noexcept = default;\n Tag(const Tag &Data) noexcept = delete;\n Tag &operator=(const Tag &Data) noexcept = delete;\n\n Tag(SecretVec &&Data) noexcept : Data(std::move(Data)) {}\n\n size_t len() const noexcept { return Data.size(); }\n\n /// The function MUST return `__WASI_CRYPTO_ERRNO_INVALID_TAG` if the\n /// tags don't match.\n WasiCryptoExpect verify(Span RawTag) const noexcept;\n\n WasiCryptoExpect pull(Span Raw) const noexcept;\n\nprivate:\n SecretVec Data;\n};\n\n} // namespace Symmetric\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", + "chunks": [ + { + "chunk_id": "doc_51_chunk_0", + "original_index": 0, + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/plugins/wasi_crypto/symmetric/tag.h - Symmetric Tag class ===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the Symmetric Tag definition.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n" + }, + { + "chunk_id": "doc_51_chunk_1", + "original_index": 1, + "content": "#include \"utils/error.h\"\n#include \"utils/secret_vec.h\"\n\n#include \"common/span.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace Symmetric {\n\n/// Authentication tag, that can be verified without channels using the provided\n/// APIs. Very small and no streaming.\n///\n/// More detail:\n/// https://github.com/WebAssembly/wasi-crypto/blob/main/docs/wasi-crypto.md#authentication-tags\nclass Tag {\npublic:\n Tag(Tag &&Data) noexcept = default;\n Tag &operator=(Tag &&Data) noexcept = default;\n Tag(const Tag &Data) noexcept = delete;\n Tag &operator=(const Tag &Data) noexcept = delete;\n\n" + }, + { + "chunk_id": "doc_51_chunk_2", + "original_index": 2, + "content": " Tag(SecretVec &&Data) noexcept : Data(std::move(Data)) {}\n\n size_t len() const noexcept { return Data.size(); }\n\n /// The function MUST return `__WASI_CRYPTO_ERRNO_INVALID_TAG` if the\n /// tags don't match.\n WasiCryptoExpect verify(Span RawTag) const noexcept;\n\n WasiCryptoExpect pull(Span Raw) const noexcept;\n\nprivate:\n SecretVec Data;\n};\n\n} // namespace Symmetric\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n" + } + ] + }, + { + "doc_id": "doc_52", + "original_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"loader/serialize.h\"\n\n#include \n#include \n#include \n\nnamespace {\n\nWasmEdge::Configure Conf;\nWasmEdge::Loader::Serializer Ser(Conf);\n\nWasmEdge::AST::CodeSection\ncreateCodeSec(std::vector Instructions) {\n WasmEdge::AST::CodeSection CodeSec;\n WasmEdge::AST::CodeSegment CodeSeg;\n WasmEdge::AST::Expression Expr;\n Expr.getInstrs() = Instructions;\n CodeSeg.getExpr() = Expr;\n CodeSec.getContent().push_back(CodeSeg);\n return CodeSec;\n}\n\nTEST(SerializeInstructionTest, SerializeBlockControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 1. Test block control instructions.\n //\n // 1. 
Serialize block with only end operation.\n // 2. Serialize loop with only end operation.\n // 3. Serialize block with instructions.\n // 4. Serialize loop with instructions.\n\n WasmEdge::AST::Instruction Block(WasmEdge::OpCode::Block);\n WasmEdge::AST::Instruction Loop(WasmEdge::OpCode::Loop);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n Block.setEmptyBlockType();\n Instructions = {Block, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n Loop.setEmptyBlockType();\n Instructions = {Loop, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n Loop.setEmptyBlockType();\n Instructions = {Block, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n Loop.setEmptyBlockType();\n Instructions = {Loop, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeIfElseControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 2. Test serialize if-else control instruction.\n //\n // 1. Serialize if statement with only end operation.\n // 2. Serialize if and else statements with only end operation.\n // 3. Serialize if statement with instructions.\n // 4. 
Serialize if and else statements with instructions.\n\n WasmEdge::AST::Instruction If(WasmEdge::OpCode::If);\n WasmEdge::AST::Instruction Else(WasmEdge::OpCode::Else);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n If.setEmptyBlockType();\n Instructions = {If, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n If.setEmptyBlockType();\n Instructions = {If, Else, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x05U, // OpCode Else\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, Else,\n I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0EU, // Content size = 14\n 0x01U, // Vector length = 1\n 0x0CU, // Code segment size = 12\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x05U, // OpCode Else\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in else statement.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeBrControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 3. Test branch control instructions.\n //\n // 1. 
Serialize valid label index.\n\n WasmEdge::AST::Instruction Br(WasmEdge::OpCode::Br);\n WasmEdge::AST::Instruction BrIf(WasmEdge::OpCode::Br_if);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n Br.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {Br, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x0CU, // OpCode Br.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n BrIf.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrIf, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected[5] = 0x0DU; // OpCode Br_if.\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeBrTableControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 4. Test branch table control instruction.\n //\n // 1. Serialize instruction with empty label vector.\n // 2. Serialize instruction with label vector.\n\n WasmEdge::AST::Instruction BrTable(WasmEdge::OpCode::Br_table);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n BrTable.setLabelListSize(1);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x0EU, // OpCode Br_table.\n 0x00U, // Vector length = 0\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n BrTable.setLabelListSize(4);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFF1U;\n BrTable.getLabelList()[1].TargetIndex = 0xFFFFFFF2U;\n BrTable.getLabelList()[2].TargetIndex = 0xFFFFFFF3U;\n BrTable.getLabelList()[3].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x1AU, // Content size = 26\n 0x01U, // Vector length = 1\n 0x18U, // Code segment size = 24\n 0x00U, // Local vec(0)\n 0x0EU, // OpCode Br_table.\n 0x03U, // Vector length = 3\n 0xF1U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[0]\n 0xF2U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[1]\n 0xF3U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[2]\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeCallControlInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 5. Test call control instructions.\n //\n // 1. Serialize call instruction with valid type index.\n // 2. Serialize call_indirect instruction with valid type and table index.\n // 3. 
Serialize call_indirect instruction with invalid table index without\n // Ref-Types proposal.\n\n WasmEdge::AST::Instruction Call(WasmEdge::OpCode::Call);\n WasmEdge::AST::Instruction CallIndirect(WasmEdge::OpCode::Call_indirect);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n Call.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {Call, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x10U, // OpCode Call.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Function type index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n CallIndirect.getTargetIndex() = 0xFFFFFFFFU;\n CallIndirect.getSourceIndex() = 0x05U;\n Instructions = {CallIndirect, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x11U, // OpCode Call_indirect.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Type index.\n 0x05U, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeReferenceInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 6. Test reference instructions.\n //\n // 1. Serialize function reference type.\n // 2. Serialize invalid reference type without Ref-Types proposal.\n\n WasmEdge::AST::Instruction RefNull(WasmEdge::OpCode::Ref__null);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n RefNull.setValType(WasmEdge::TypeCode::FuncRef);\n Instructions = {RefNull, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0xD0U, // OpCode Ref__null.\n 0x70U, // FuncRef\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n RefNull.setValType(WasmEdge::TypeCode::ExternRef);\n Instructions = {RefNull, End};\n EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeParametricInstruction) {\n WasmEdge::Configure ConfNoSIMD;\n ConfNoSIMD.removeProposal(WasmEdge::Proposal::SIMD);\n WasmEdge::Loader::Serializer SerNoSIMD(ConfNoSIMD);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 7. Test parametric instructions.\n //\n // 1. Serialize valid select_t instruction with value type list.\n // 2. 
Serialize invalid value type list without SIMD proposal.\n\n WasmEdge::AST::Instruction SelectT(WasmEdge::OpCode::Select_t);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n SelectT.setValTypeListSize(2);\n SelectT.getValTypeList()[0] = WasmEdge::TypeCode::I32;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::I64;\n Instructions = {SelectT, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x1CU, // OpCode Select_t.\n 0x02U, // Vector length = 2\n 0x7FU, 0x7EU, // Value types\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n SelectT.getValTypeList()[0] = WasmEdge::TypeCode::V128;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::V128;\n Instructions = {SelectT, End};\n EXPECT_FALSE(SerNoSIMD.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeVariableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 8. Test variable instructions.\n //\n // 1. Serialize valid local or global index.\n\n WasmEdge::AST::Instruction LocalGet(WasmEdge::OpCode::Local__get);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n LocalGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {LocalGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x20U, // OpCode Local__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Local index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeTableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 9. Test table instructions.\n //\n // 1. Serialize table_get instruction.\n // 2. Serialize table_init instruction.\n\n WasmEdge::AST::Instruction TableGet(WasmEdge::OpCode::Table__get);\n WasmEdge::AST::Instruction TableInit(WasmEdge::OpCode::Table__init);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n TableGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x25U, // OpCode Table__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n TableInit.getSourceIndex() = 0x05U;\n TableInit.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableInit, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0CU, // Content size = 12\n 0x01U, // Vector length = 1\n 0x0AU, // Code segment size = 10\n 0x00U, // Local vec(0)\n 0xFCU, 0x0CU, // OpCode Table__init.\n 0x05U, // Element idx.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeMemoryInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 10. Test memory instructions.\n //\n // 1. Serialize memory_grow instruction.\n // 2. 
Serialize i32_load instruction.\n\n WasmEdge::AST::Instruction MemoryGrow(WasmEdge::OpCode::Memory__grow);\n WasmEdge::AST::Instruction I32Load(WasmEdge::OpCode::I32__load);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n Instructions = {MemoryGrow, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0x40U, // OpCode Memory__grow.\n 0x00U, // Checking byte\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeConstInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 11. Test const numeric instructions.\n //\n // 1. Serialize I32 const numeric instruction.\n // 2. Serialize I64 const numeric instruction.\n // 3. Serialize F32 const numeric instruction.\n // 4. 
Serialize F64 const numeric instruction.\n\n WasmEdge::AST::Instruction I32Const(WasmEdge::OpCode::I32__const);\n WasmEdge::AST::Instruction I64Const(WasmEdge::OpCode::I64__const);\n WasmEdge::AST::Instruction F32Const(WasmEdge::OpCode::F32__const);\n WasmEdge::AST::Instruction F64Const(WasmEdge::OpCode::F64__const);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n I32Const.setNum(-123456);\n Instructions = {I32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x41U, // OpCode I32__const.\n 0xC0U, 0xBBU, 0x78U, // I32 -123456.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n I64Const.setNum(static_cast(-112233445566L));\n Instructions = {I64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x42U, // OpCode I64__const.\n 0xC2U, 0x8EU, 0xF6U, 0xF2U, 0xDDU, 0x7CU, // I64 -112233445566\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n F32Const.setNum(static_cast(-0x1.921fb4p+1)); // -3.1415926F\n Instructions = {F32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x09U, // Content size = 9\n 0x01U, // Vector length = 1\n 0x07U, // Code segment size = 7\n 0x00U, // Local vec(0)\n 0x43U, // OpCode F32__const.\n 0xDAU, 0x0FU, 0x49U, 0xC0U, // F32 -3.1415926\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n F64Const.setNum(-3.1415926535897932);\n Instructions = {F64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0DU, // Content size = 13\n 0x01U, // Vector length = 1\n 0x0BU, // Code segment size = 11\n 0x00U, // Local vec(0)\n 0x44U, // OpCode F64__const.\n 0x18U, 0x2DU, 0x44U, 0x54U,\n 0xFBU, 0x21U, 0x09U, 0xC0U, // F64 -3.1415926535897932\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n} // namespace\n", + "chunks": [ + { + "chunk_id": "doc_52_chunk_0", + "original_index": 0, + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"loader/serialize.h\"\n\n#include \n#include \n#include \n\nnamespace {\n\nWasmEdge::Configure Conf;\nWasmEdge::Loader::Serializer Ser(Conf);\n\nWasmEdge::AST::CodeSection\ncreateCodeSec(std::vector Instructions) {\n WasmEdge::AST::CodeSection CodeSec;\n WasmEdge::AST::CodeSegment CodeSeg;\n WasmEdge::AST::Expression Expr;\n Expr.getInstrs() = Instructions;\n CodeSeg.getExpr() = Expr;\n CodeSec.getContent().push_back(CodeSeg);\n return CodeSec;\n}\n\n" + }, + { + "chunk_id": "doc_52_chunk_1", + "original_index": 1, + "content": "TEST(SerializeInstructionTest, SerializeBlockControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 1. Test block control instructions.\n //\n // 1. Serialize block with only end operation.\n // 2. Serialize loop with only end operation.\n // 3. Serialize block with instructions.\n // 4. 
Serialize loop with instructions.\n\n WasmEdge::AST::Instruction Block(WasmEdge::OpCode::Block);\n WasmEdge::AST::Instruction Loop(WasmEdge::OpCode::Loop);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n" + }, + { + "chunk_id": "doc_52_chunk_2", + "original_index": 2, + "content": " Block.setEmptyBlockType();\n Instructions = {Block, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_3", + "original_index": 3, + "content": " Loop.setEmptyBlockType();\n Instructions = {Loop, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_4", + "original_index": 4, + "content": " Loop.setEmptyBlockType();\n Instructions = {Block, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_5", + "original_index": 5, + "content": " Loop.setEmptyBlockType();\n Instructions = {Loop, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n" + }, + { + "chunk_id": "doc_52_chunk_6", + "original_index": 6, + "content": "TEST(SerializeInstructionTest, SerializeIfElseControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 2. Test serialize if-else control instruction.\n //\n // 1. Serialize if statement with only end operation.\n // 2. Serialize if and else statements with only end operation.\n // 3. Serialize if statement with instructions.\n // 4. 
Serialize if and else statements with instructions.\n\n" + }, + { + "chunk_id": "doc_52_chunk_7", + "original_index": 7, + "content": " WasmEdge::AST::Instruction If(WasmEdge::OpCode::If);\n WasmEdge::AST::Instruction Else(WasmEdge::OpCode::Else);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n If.setEmptyBlockType();\n Instructions = {If, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_8", + "original_index": 8, + "content": " If.setEmptyBlockType();\n Instructions = {If, Else, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x05U, // OpCode Else\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_9", + "original_index": 9, + "content": " If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_10", + "original_index": 10, + "content": " If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, Else,\n I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0EU, // Content size = 14\n 0x01U, // Vector length = 1\n 0x0CU, // Code segment size = 12\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x05U, // OpCode Else\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in else statement.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n" + }, + { + "chunk_id": "doc_52_chunk_11", + "original_index": 11, + "content": "TEST(SerializeInstructionTest, SerializeBrControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 3. Test branch control instructions.\n //\n // 1. 
Serialize valid label index.\n\n WasmEdge::AST::Instruction Br(WasmEdge::OpCode::Br);\n WasmEdge::AST::Instruction BrIf(WasmEdge::OpCode::Br_if);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n" + }, + { + "chunk_id": "doc_52_chunk_12", + "original_index": 12, + "content": " Br.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {Br, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x0CU, // OpCode Br.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_13", + "original_index": 13, + "content": " BrIf.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrIf, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected[5] = 0x0DU; // OpCode Br_if.\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeBrTableControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 4. Test branch table control instruction.\n //\n // 1. Serialize instruction with empty label vector.\n // 2. Serialize instruction with label vector.\n\n WasmEdge::AST::Instruction BrTable(WasmEdge::OpCode::Br_table);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n" + }, + { + "chunk_id": "doc_52_chunk_14", + "original_index": 14, + "content": " BrTable.setLabelListSize(1);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x0EU, // OpCode Br_table.\n 0x00U, // Vector length = 0\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_15", + "original_index": 15, + "content": " BrTable.setLabelListSize(4);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFF1U;\n BrTable.getLabelList()[1].TargetIndex = 0xFFFFFFF2U;\n BrTable.getLabelList()[2].TargetIndex = 0xFFFFFFF3U;\n BrTable.getLabelList()[3].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n" + }, + { + "chunk_id": "doc_52_chunk_16", + "original_index": 16, + "content": " 0x1AU, // Content size = 26\n 0x01U, // Vector length = 1\n 0x18U, // Code segment size = 24\n 0x00U, // Local vec(0)\n 0x0EU, // OpCode Br_table.\n 0x03U, // Vector length = 3\n 0xF1U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[0]\n 0xF2U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[1]\n 0xF3U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[2]\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n" + }, + { + "chunk_id": "doc_52_chunk_17", + "original_index": 17, + "content": "TEST(SerializeInstructionTest, SerializeCallControlInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 5. 
Test call control instructions.\n //\n // 1. Serialize call instruction with valid type index.\n // 2. Serialize call_indirect instruction with valid type and table index.\n // 3. Serialize call_indirect instruction with invalid table index without\n // Ref-Types proposal.\n\n WasmEdge::AST::Instruction Call(WasmEdge::OpCode::Call);\n WasmEdge::AST::Instruction CallIndirect(WasmEdge::OpCode::Call_indirect);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n" + }, + { + "chunk_id": "doc_52_chunk_18", + "original_index": 18, + "content": " Call.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {Call, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x10U, // OpCode Call.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Function type index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_19", + "original_index": 19, + "content": " CallIndirect.getTargetIndex() = 0xFFFFFFFFU;\n CallIndirect.getSourceIndex() = 0x05U;\n Instructions = {CallIndirect, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x11U, // OpCode Call_indirect.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Type index.\n 0x05U, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_20", + "original_index": 20, + "content": " EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeReferenceInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 6. Test reference instructions.\n //\n // 1. Serialize function reference type.\n // 2. Serialize invalid reference type without Ref-Types proposal.\n\n WasmEdge::AST::Instruction RefNull(WasmEdge::OpCode::Ref__null);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n" + }, + { + "chunk_id": "doc_52_chunk_21", + "original_index": 21, + "content": " RefNull.setValType(WasmEdge::TypeCode::FuncRef);\n Instructions = {RefNull, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0xD0U, // OpCode Ref__null.\n 0x70U, // FuncRef\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_22", + "original_index": 22, + "content": " RefNull.setValType(WasmEdge::TypeCode::ExternRef);\n Instructions = {RefNull, End};\n EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeParametricInstruction) {\n WasmEdge::Configure ConfNoSIMD;\n ConfNoSIMD.removeProposal(WasmEdge::Proposal::SIMD);\n WasmEdge::Loader::Serializer SerNoSIMD(ConfNoSIMD);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 7. Test parametric instructions.\n //\n // 1. 
Serialize valid select_t instruction with value type list.\n // 2. Serialize invalid value type list without SIMD proposal.\n\n WasmEdge::AST::Instruction SelectT(WasmEdge::OpCode::Select_t);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n" + }, + { + "chunk_id": "doc_52_chunk_23", + "original_index": 23, + "content": " SelectT.setValTypeListSize(2);\n SelectT.getValTypeList()[0] = WasmEdge::TypeCode::I32;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::I64;\n Instructions = {SelectT, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x1CU, // OpCode Select_t.\n 0x02U, // Vector length = 2\n 0x7FU, 0x7EU, // Value types\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_24", + "original_index": 24, + "content": " SelectT.getValTypeList()[0] = WasmEdge::TypeCode::V128;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::V128;\n Instructions = {SelectT, End};\n EXPECT_FALSE(SerNoSIMD.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeVariableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 8. Test variable instructions.\n //\n // 1. Serialize valid local or global index.\n\n WasmEdge::AST::Instruction LocalGet(WasmEdge::OpCode::Local__get);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n" + }, + { + "chunk_id": "doc_52_chunk_25", + "original_index": 25, + "content": " LocalGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {LocalGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x20U, // OpCode Local__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Local index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n" + }, + { + "chunk_id": "doc_52_chunk_26", + "original_index": 26, + "content": "TEST(SerializeInstructionTest, SerializeTableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 9. Test table instructions.\n //\n // 1. Serialize table_get instruction.\n // 2. 
Serialize table_init instruction.\n\n WasmEdge::AST::Instruction TableGet(WasmEdge::OpCode::Table__get);\n WasmEdge::AST::Instruction TableInit(WasmEdge::OpCode::Table__init);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n" + }, + { + "chunk_id": "doc_52_chunk_27", + "original_index": 27, + "content": " TableGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x25U, // OpCode Table__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_28", + "original_index": 28, + "content": " TableInit.getSourceIndex() = 0x05U;\n TableInit.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableInit, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0CU, // Content size = 12\n 0x01U, // Vector length = 1\n 0x0AU, // Code segment size = 10\n 0x00U, // Local vec(0)\n 0xFCU, 0x0CU, // OpCode Table__init.\n 0x05U, // Element idx.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n" + }, + { + "chunk_id": "doc_52_chunk_29", + "original_index": 29, + "content": "TEST(SerializeInstructionTest, SerializeMemoryInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 10. Test memory instructions.\n //\n // 1. Serialize memory_grow instruction.\n // 2. Serialize i32_load instruction.\n\n WasmEdge::AST::Instruction MemoryGrow(WasmEdge::OpCode::Memory__grow);\n WasmEdge::AST::Instruction I32Load(WasmEdge::OpCode::I32__load);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n Instructions = {MemoryGrow, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0x40U, // OpCode Memory__grow.\n 0x00U, // Checking byte\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_30", + "original_index": 30, + "content": " I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_31", + "original_index": 31, + "content": " I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 
0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n" + }, + { + "chunk_id": "doc_52_chunk_32", + "original_index": 32, + "content": "TEST(SerializeInstructionTest, SerializeConstInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 11. Test const numeric instructions.\n //\n // 1. Serialize I32 const numeric instruction.\n // 2. Serialize I64 const numeric instruction.\n // 3. Serialize F32 const numeric instruction.\n // 4. Serialize F64 const numeric instruction.\n\n WasmEdge::AST::Instruction I32Const(WasmEdge::OpCode::I32__const);\n WasmEdge::AST::Instruction I64Const(WasmEdge::OpCode::I64__const);\n WasmEdge::AST::Instruction F32Const(WasmEdge::OpCode::F32__const);\n WasmEdge::AST::Instruction F64Const(WasmEdge::OpCode::F64__const);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n" + }, + { + "chunk_id": "doc_52_chunk_33", + "original_index": 33, + "content": " I32Const.setNum(-123456);\n Instructions = {I32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x41U, // OpCode I32__const.\n 0xC0U, 0xBBU, 0x78U, // I32 -123456.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_34", + "original_index": 34, + "content": " I64Const.setNum(static_cast(-112233445566L));\n Instructions = {I64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x42U, // OpCode I64__const.\n 0xC2U, 0x8EU, 0xF6U, 0xF2U, 0xDDU, 0x7CU, // I64 -112233445566\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_35", + "original_index": 35, + "content": " F32Const.setNum(static_cast(-0x1.921fb4p+1)); // -3.1415926F\n Instructions = {F32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x09U, // Content size = 9\n 0x01U, // Vector length = 1\n 0x07U, // Code segment size = 7\n 0x00U, // Local vec(0)\n 0x43U, // OpCode F32__const.\n 0xDAU, 0x0FU, 0x49U, 0xC0U, // F32 -3.1415926\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n" + }, + { + "chunk_id": "doc_52_chunk_36", + "original_index": 36, + "content": " F64Const.setNum(-3.1415926535897932);\n Instructions = {F64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0DU, // Content size = 13\n 0x01U, // Vector length = 1\n 0x0BU, // Code segment size = 11\n 0x00U, // Local vec(0)\n 0x44U, // OpCode F64__const.\n 0x18U, 0x2DU, 0x44U, 0x54U,\n 0xFBU, 0x21U, 0x09U, 0xC0U, // F64 -3.1415926535897932\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n} // namespace\n" + } + ] + }, + { + "doc_id": "doc_53", + "original_uuid": "adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823", + "content": "#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiLoggingMock {\n\nusing 
namespace std::literals;\n\nclass Log : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"wasi-logging\"sv);\n return Unexpect(ErrCode::Value::HostFuncError);\n }\n};\n\n} // namespace WasiLoggingMock\n} // namespace Host\n} // namespace WasmEdge", + "chunks": [ + { + "chunk_id": "doc_53_chunk_0", + "original_index": 0, + "content": "#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiLoggingMock {\n\nusing namespace std::literals;\n\nclass Log : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"wasi-logging\"sv);\n return Unexpect(ErrCode::Value::HostFuncError);\n }\n};\n\n} // namespace WasiLoggingMock\n} // namespace Host\n} // namespace WasmEdge" + } + ] + }, + { + "doc_id": "doc_54", + "original_uuid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"common/defines.h\"\n#if WASMEDGE_OS_MACOS\n\n#include \"common/errcode.h\"\n#include \"host/wasi/environ.h\"\n#include \"macos.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WASI {\n\nWasiExpect Environ::procRaise(__wasi_signal_t Signal) const noexcept {\n int SysSignal;\n switch (Signal) {\n case __WASI_SIGNAL_NONE:\n SysSignal = 0;\n break;\n case __WASI_SIGNAL_HUP:\n SysSignal = SIGHUP;\n break;\n case __WASI_SIGNAL_INT:\n SysSignal = SIGINT;\n break;\n case __WASI_SIGNAL_QUIT:\n SysSignal = SIGQUIT;\n break;\n case __WASI_SIGNAL_ILL:\n SysSignal = SIGILL;\n break;\n case __WASI_SIGNAL_TRAP:\n SysSignal = SIGTRAP;\n break;\n case __WASI_SIGNAL_ABRT:\n SysSignal = SIGABRT;\n break;\n case __WASI_SIGNAL_BUS:\n SysSignal = SIGBUS;\n break;\n case __WASI_SIGNAL_FPE:\n SysSignal = SIGFPE;\n break;\n case __WASI_SIGNAL_KILL:\n SysSignal = SIGKILL;\n break;\n case __WASI_SIGNAL_USR1:\n SysSignal = SIGUSR1;\n break;\n case __WASI_SIGNAL_SEGV:\n SysSignal = SIGSEGV;\n break;\n case __WASI_SIGNAL_USR2:\n SysSignal = SIGUSR2;\n break;\n case __WASI_SIGNAL_PIPE:\n SysSignal = SIGPIPE;\n break;\n case __WASI_SIGNAL_ALRM:\n SysSignal = SIGALRM;\n break;\n case __WASI_SIGNAL_TERM:\n SysSignal = SIGTERM;\n break;\n case __WASI_SIGNAL_CHLD:\n SysSignal = SIGCHLD;\n break;\n case __WASI_SIGNAL_CONT:\n SysSignal = SIGCONT;\n break;\n case __WASI_SIGNAL_STOP:\n SysSignal = SIGSTOP;\n break;\n case __WASI_SIGNAL_TSTP:\n SysSignal = SIGTSTP;\n break;\n case __WASI_SIGNAL_TTIN:\n SysSignal = SIGTTIN;\n break;\n case __WASI_SIGNAL_TTOU:\n SysSignal = SIGTTOU;\n break;\n case __WASI_SIGNAL_URG:\n SysSignal = SIGURG;\n break;\n case __WASI_SIGNAL_XCPU:\n SysSignal = SIGXCPU;\n break;\n case __WASI_SIGNAL_XFSZ:\n SysSignal = SIGXFSZ;\n break;\n case __WASI_SIGNAL_VTALRM:\n SysSignal = SIGVTALRM;\n break;\n case __WASI_SIGNAL_PROF:\n SysSignal = SIGPROF;\n break;\n case __WASI_SIGNAL_WINCH:\n SysSignal = SIGWINCH;\n break;\n case __WASI_SIGNAL_SYS:\n SysSignal = SIGSYS;\n break;\n case __WASI_SIGNAL_POLL:\n case __WASI_SIGNAL_PWR:\n default:\n return WasiUnexpect(__WASI_ERRNO_NOTSUP);\n }\n if (auto Res = std::raise(SysSignal); Res != 0) {\n return WasiUnexpect(fromErrNo(errno));\n }\n return {};\n}\n\nWasiExpect Environ::schedYield() const noexcept {\n 
::sched_yield();\n return {};\n}\n\n} // namespace WASI\n} // namespace Host\n} // namespace WasmEdge\n\n#endif\n", + "chunks": [ + { + "chunk_id": "doc_54_chunk_0", + "original_index": 0, + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"common/defines.h\"\n#if WASMEDGE_OS_MACOS\n\n#include \"common/errcode.h\"\n#include \"host/wasi/environ.h\"\n#include \"macos.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WASI {\n\nWasiExpect Environ::procRaise(__wasi_signal_t Signal) const noexcept {\n int SysSignal;\n switch (Signal) {\n case __WASI_SIGNAL_NONE:\n SysSignal = 0;\n break;\n case __WASI_SIGNAL_HUP:\n SysSignal = SIGHUP;\n break;\n case __WASI_SIGNAL_INT:\n SysSignal = SIGINT;\n break;\n case __WASI_SIGNAL_QUIT:\n SysSignal = SIGQUIT;\n break;\n case __WASI_SIGNAL_ILL:\n SysSignal = SIGILL;\n break;\n case __WASI_SIGNAL_TRAP:\n SysSignal = SIGTRAP;\n break;\n case __WASI_SIGNAL_ABRT:\n SysSignal = SIGABRT;\n break;\n case __WASI_SIGNAL_BUS:\n SysSignal = SIGBUS;\n break;\n case __WASI_SIGNAL_FPE:\n SysSignal = SIGFPE;\n break;\n" + }, + { + "chunk_id": "doc_54_chunk_1", + "original_index": 1, + "content": " case __WASI_SIGNAL_KILL:\n SysSignal = SIGKILL;\n break;\n case __WASI_SIGNAL_USR1:\n SysSignal = SIGUSR1;\n break;\n case __WASI_SIGNAL_SEGV:\n SysSignal = SIGSEGV;\n break;\n case __WASI_SIGNAL_USR2:\n SysSignal = SIGUSR2;\n break;\n case __WASI_SIGNAL_PIPE:\n SysSignal = SIGPIPE;\n break;\n case __WASI_SIGNAL_ALRM:\n SysSignal = SIGALRM;\n break;\n case __WASI_SIGNAL_TERM:\n SysSignal = SIGTERM;\n break;\n case __WASI_SIGNAL_CHLD:\n SysSignal = SIGCHLD;\n break;\n case __WASI_SIGNAL_CONT:\n SysSignal = SIGCONT;\n break;\n case __WASI_SIGNAL_STOP:\n SysSignal = SIGSTOP;\n break;\n case __WASI_SIGNAL_TSTP:\n SysSignal = SIGTSTP;\n break;\n case __WASI_SIGNAL_TTIN:\n SysSignal = SIGTTIN;\n break;\n case __WASI_SIGNAL_TTOU:\n SysSignal = SIGTTOU;\n break;\n case __WASI_SIGNAL_URG:\n SysSignal = SIGURG;\n break;\n case __WASI_SIGNAL_XCPU:\n SysSignal = SIGXCPU;\n break;\n" + }, + { + "chunk_id": "doc_54_chunk_2", + "original_index": 2, + "content": " case __WASI_SIGNAL_XFSZ:\n SysSignal = SIGXFSZ;\n break;\n case __WASI_SIGNAL_VTALRM:\n SysSignal = SIGVTALRM;\n break;\n case __WASI_SIGNAL_PROF:\n SysSignal = SIGPROF;\n break;\n case __WASI_SIGNAL_WINCH:\n SysSignal = SIGWINCH;\n break;\n case __WASI_SIGNAL_SYS:\n SysSignal = SIGSYS;\n break;\n case __WASI_SIGNAL_POLL:\n case __WASI_SIGNAL_PWR:\n default:\n return WasiUnexpect(__WASI_ERRNO_NOTSUP);\n }\n if (auto Res = std::raise(SysSignal); Res != 0) {\n return WasiUnexpect(fromErrNo(errno));\n }\n return {};\n}\n\nWasiExpect Environ::schedYield() const noexcept {\n ::sched_yield();\n return {};\n}\n\n} // namespace WASI\n} // namespace Host\n} // namespace WasmEdge\n\n#endif\n" + } + ] + }, + { + "doc_id": "doc_55", + "original_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCryptoMock {\n\nusing namespace std::literals;\nstatic inline constexpr const uint32_t kWASICryptoError = 1U;\n\nnamespace Common {\nclass ArrayOutputLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, 
uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass ArrayOutputPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSetGuestBuffer\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerInvalidate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Common\n\nnamespace AsymmetricCommon {\nclass KeypairGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairGenerateManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairReplaceManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return 
kWASICryptoError;\n }\n};\n\nclass KeypairFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairFromPkAndSk : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairPublickey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairSecretkey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyFromSecretkey\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace AsymmetricCommon\n\nnamespace Kx {\nclass Dh : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Encapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Decapsulate : 
public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Kx\n\nnamespace Signatures {\nclass Export : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Import : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateUpdate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSign : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateOpen\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateUpdate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateVerify\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateClose\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Close : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n} // namespace Signatures\n\nnamespace Symmetric {\nclass KeyGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyGenerateManaged : public Runtime::HostFunction {\npublic:\n Expect body(const 
Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyReplaceManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClone : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOptionsGet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOptionsGetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateAbsorb : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueeze : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueezeTag : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueezeKey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateMaxTagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateEncrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateEncryptDetached\n : public Runtime::HostFunction 
{\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateDecrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateDecryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateRatchet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Symmetric\n\n} // namespace WasiCryptoMock\n} // namespace Host\n} // namespace WasmEdge\n", + "chunks": [ + { + "chunk_id": "doc_55_chunk_0", + "original_index": 0, + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCryptoMock {\n\nusing namespace std::literals;\nstatic inline constexpr const uint32_t kWASICryptoError = 1U;\n\nnamespace Common {\nclass ArrayOutputLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass ArrayOutputPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_1", + "original_index": 1, + "content": "class OptionsOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_2", + "original_index": 2, + "content": "class OptionsSet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return 
kWASICryptoError;\n }\n};\n\nclass OptionsSetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_3", + "original_index": 3, + "content": "class OptionsSetGuestBuffer\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_4", + "original_index": 4, + "content": "class SecretsManagerInvalidate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Common\n\nnamespace AsymmetricCommon {\nclass KeypairGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_5", + "original_index": 5, + "content": "class KeypairGenerateManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_6", + "original_index": 6, + "content": "class KeypairReplaceManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_7", + "original_index": 7, + "content": "class KeypairFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairFromPkAndSk : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_8", + 
"original_index": 8, + "content": "class KeypairExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairPublickey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_9", + "original_index": 9, + "content": "class KeypairSecretkey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_10", + "original_index": 10, + "content": "class PublickeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_11", + "original_index": 11, + "content": "class PublickeyFromSecretkey\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_12", + "original_index": 12, + "content": "class SecretkeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_13", + "original_index": 13, + "content": "class SecretkeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace AsymmetricCommon\n\nnamespace Kx {\nclass Dh : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_14", + "original_index": 14, + "content": "class Encapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return 
kWASICryptoError;\n }\n};\n\nclass Decapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Kx\n\n" + }, + { + "chunk_id": "doc_55_chunk_15", + "original_index": 15, + "content": "namespace Signatures {\nclass Export : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Import : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_16", + "original_index": 16, + "content": "class StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateUpdate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_17", + "original_index": 17, + "content": "class StateSign : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_18", + "original_index": 18, + "content": "class VerificationStateOpen\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateUpdate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_19", + "original_index": 19, + "content": "class VerificationStateVerify\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateClose\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Close : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n} // namespace Signatures\n\n" + }, + { + "chunk_id": "doc_55_chunk_20", + "original_index": 20, + "content": "namespace Symmetric {\nclass KeyGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n 
printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_21", + "original_index": 21, + "content": "class KeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyGenerateManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_22", + "original_index": 22, + "content": "class KeyStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyReplaceManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_23", + "original_index": 23, + "content": "class KeyId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_24", + "original_index": 24, + "content": "class StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClone : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_25", + "original_index": 25, + "content": "class StateOptionsGet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOptionsGetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_26", + "original_index": 26, + "content": "class StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateAbsorb : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_27", + "original_index": 27, + "content": "class StateSqueeze : public 
Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueezeTag : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_28", + "original_index": 28, + "content": "class StateSqueezeKey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateMaxTagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_29", + "original_index": 29, + "content": "class StateEncrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateEncryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_30", + "original_index": 30, + "content": "class StateDecrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateDecryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_31", + "original_index": 31, + "content": "class StateRatchet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n" + }, + { + "chunk_id": "doc_55_chunk_32", + "original_index": 32, + "content": "class TagPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Symmetric\n\n} // namespace WasiCryptoMock\n} // namespace Host\n} // namespace WasmEdge\n" + } + ] + }, + { + "doc_id": "doc_56", + "original_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", + "content": "// SPDX-License-Identifier: 
Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/ast/module.h - Module class definition -------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the declaration of the Module node class, which is the\n/// module node in AST.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"ast/section.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace AST {\n\n/// AST Module node.\nclass Module {\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n /// Getters of references to sections.\n Span getCustomSections() const noexcept {\n return CustomSecs;\n }\n std::vector &getCustomSections() noexcept {\n return CustomSecs;\n }\n const TypeSection &getTypeSection() const { return TypeSec; }\n TypeSection &getTypeSection() { return TypeSec; }\n const ImportSection &getImportSection() const { return ImportSec; }\n ImportSection &getImportSection() { return ImportSec; }\n const FunctionSection &getFunctionSection() const { return FunctionSec; }\n FunctionSection &getFunctionSection() { return FunctionSec; }\n const TableSection &getTableSection() const { return TableSec; }\n TableSection &getTableSection() { return TableSec; }\n const MemorySection &getMemorySection() const { return MemorySec; }\n MemorySection &getMemorySection() { return MemorySec; }\n const GlobalSection &getGlobalSection() const { return GlobalSec; }\n GlobalSection &getGlobalSection() { return GlobalSec; }\n const ExportSection &getExportSection() const { return ExportSec; }\n ExportSection &getExportSection() { return ExportSec; }\n const StartSection &getStartSection() const { return StartSec; }\n StartSection &getStartSection() { return StartSec; }\n const ElementSection &getElementSection() const { return ElementSec; }\n ElementSection &getElementSection() { return ElementSec; }\n const CodeSection &getCodeSection() const { return CodeSec; }\n CodeSection &getCodeSection() { return CodeSec; }\n const DataSection &getDataSection() const { return DataSec; }\n DataSection &getDataSection() { return DataSec; }\n const DataCountSection &getDataCountSection() const { return DataCountSec; }\n DataCountSection &getDataCountSection() { return DataCountSec; }\n const AOTSection &getAOTSection() const { return AOTSec; }\n AOTSection &getAOTSection() { return AOTSec; }\n\n /// Getter and setter of compiled symbol.\n const auto &getSymbol() const noexcept { return IntrSymbol; }\n void setSymbol(Symbol S) noexcept {\n IntrSymbol = std::move(S);\n }\n\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; }\n void setIsValidated(bool V = true) noexcept { IsValidated = V; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n /// @}\n\n /// \\name Section nodes of Module node.\n /// @{\n std::vector CustomSecs;\n TypeSection TypeSec;\n ImportSection ImportSec;\n FunctionSection FunctionSec;\n TableSection TableSec;\n MemorySection MemorySec;\n GlobalSection GlobalSec;\n ExportSection ExportSec;\n StartSection StartSec;\n ElementSection ElementSec;\n CodeSection 
CodeSec;\n DataSection DataSec;\n DataCountSection DataCountSec;\n /// @}\n\n /// \\name Data of AOT.\n /// @{\n AOTSection AOTSec;\n Symbol IntrSymbol;\n /// @}\n\n /// \\name Validated flag.\n /// @{\n bool IsValidated = false;\n /// @}\n};\n\nclass CoreModuleSection : public Section {\npublic:\n /// Getter of content.\n const Module &getContent() const noexcept { return Content; }\n Module &getContent() noexcept { return Content; }\n\nprivate:\n Module Content;\n};\n\nnamespace Component {\n\nclass Component {\n using Section =\n std::variant;\n\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n /// Getter of layer vector.\n const std::vector &getLayer() const noexcept { return Layer; }\n std::vector &getLayer() noexcept { return Layer; }\n\n std::vector
<Section>
&getSections() noexcept { return Secs; }\n Span getSections() const noexcept { return Secs; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n std::vector Layer;\n\n std::vector
Secs;\n /// @}\n};\n\n} // namespace Component\n\n} // namespace AST\n} // namespace WasmEdge\n", + "chunks": [ + { + "chunk_id": "doc_56_chunk_0", + "original_index": 0, + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/ast/module.h - Module class definition -------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the declaration of the Module node class, which is the\n/// module node in AST.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"ast/section.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace AST {\n\n/// AST Module node.\nclass Module {\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n" + }, + { + "chunk_id": "doc_56_chunk_1", + "original_index": 1, + "content": " /// Getters of references to sections.\n Span getCustomSections() const noexcept {\n return CustomSecs;\n }\n std::vector &getCustomSections() noexcept {\n return CustomSecs;\n }\n const TypeSection &getTypeSection() const { return TypeSec; }\n TypeSection &getTypeSection() { return TypeSec; }\n const ImportSection &getImportSection() const { return ImportSec; }\n ImportSection &getImportSection() { return ImportSec; }\n const FunctionSection &getFunctionSection() const { return FunctionSec; }\n" + }, + { + "chunk_id": "doc_56_chunk_2", + "original_index": 2, + "content": " FunctionSection &getFunctionSection() { return FunctionSec; }\n const TableSection &getTableSection() const { return TableSec; }\n TableSection &getTableSection() { return TableSec; }\n const MemorySection &getMemorySection() const { return MemorySec; }\n MemorySection &getMemorySection() { return MemorySec; }\n const GlobalSection &getGlobalSection() const { return GlobalSec; }\n GlobalSection &getGlobalSection() { return GlobalSec; }\n const ExportSection &getExportSection() const { return ExportSec; }\n ExportSection &getExportSection() { return ExportSec; }\n const StartSection &getStartSection() const { return StartSec; }\n StartSection &getStartSection() { return StartSec; }\n const ElementSection &getElementSection() const { return ElementSec; }\n ElementSection &getElementSection() { return ElementSec; }\n const CodeSection &getCodeSection() const { return CodeSec; }\n" + }, + { + "chunk_id": "doc_56_chunk_3", + "original_index": 3, + "content": " CodeSection &getCodeSection() { return CodeSec; }\n const DataSection &getDataSection() const { return DataSec; }\n DataSection &getDataSection() { return DataSec; }\n const DataCountSection &getDataCountSection() const { return DataCountSec; }\n DataCountSection &getDataCountSection() { return DataCountSec; }\n const AOTSection &getAOTSection() const { return AOTSec; }\n AOTSection &getAOTSection() { return AOTSec; }\n\n /// Getter and setter of compiled symbol.\n const auto &getSymbol() const noexcept { return IntrSymbol; }\n void setSymbol(Symbol S) noexcept {\n IntrSymbol = std::move(S);\n }\n\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; }\n void setIsValidated(bool V = true) noexcept { 
IsValidated = V; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n /// @}\n\n" + }, + { + "chunk_id": "doc_56_chunk_4", + "original_index": 4, + "content": " /// \\name Section nodes of Module node.\n /// @{\n std::vector CustomSecs;\n TypeSection TypeSec;\n ImportSection ImportSec;\n FunctionSection FunctionSec;\n TableSection TableSec;\n MemorySection MemorySec;\n GlobalSection GlobalSec;\n ExportSection ExportSec;\n StartSection StartSec;\n ElementSection ElementSec;\n CodeSection CodeSec;\n DataSection DataSec;\n DataCountSection DataCountSec;\n /// @}\n\n /// \\name Data of AOT.\n /// @{\n AOTSection AOTSec;\n Symbol IntrSymbol;\n /// @}\n\n /// \\name Validated flag.\n /// @{\n bool IsValidated = false;\n /// @}\n};\n\nclass CoreModuleSection : public Section {\npublic:\n /// Getter of content.\n const Module &getContent() const noexcept { return Content; }\n Module &getContent() noexcept { return Content; }\n\nprivate:\n Module Content;\n};\n\nnamespace Component {\n\n" + }, + { + "chunk_id": "doc_56_chunk_5", + "original_index": 5, + "content": "class Component {\n using Section =\n std::variant;\n\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n" + }, + { + "chunk_id": "doc_56_chunk_6", + "original_index": 6, + "content": " /// Getter of layer vector.\n const std::vector &getLayer() const noexcept { return Layer; }\n std::vector &getLayer() noexcept { return Layer; }\n\n std::vector
&getSections() noexcept { return Secs; }\n Span getSections() const noexcept { return Secs; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n std::vector Layer;\n\n std::vector
Secs;\n /// @}\n};\n\n} // namespace Component\n\n} // namespace AST\n} // namespace WasmEdge\n" + } + ] + }, + { + "doc_id": "doc_57", + "original_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", + "content": "#pragma once\n#include \"avutil_base.h\"\n\n#include \"runtime/callingframe.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVUtil {\n\nclass AVLogSetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t LogLevelId);\n};\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVLogSetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t FlagsId);\n};\n\nclass AVLogGetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n// Option funcs.\nclass AVOptSetBin : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetBin(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSet : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSet(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetDouble : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetDouble(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetImageSize : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetImageSize(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetPixelFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetPixelFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetSampleFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetSampleFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVRescaleQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, int32_t CDen);\n};\n\nclass AVRescaleQRnd : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQRnd(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &, int64_t A, int32_t BNum,\n int32_t BDen, int32_t CNum, int32_t 
CDen,\n int32_t RoundingId);\n};\n\nclass AVUtilVersion : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilVersion(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &);\n};\n\nclass AVGetChannelLayoutNbChannels\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNbChannels(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutNameLen\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNameLen(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutName\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutName(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId, uint32_t NamePtr,\n uint32_t NameLen);\n};\n\nclass AVGetChannelLayoutMask\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutMask(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetDefaultChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetDefaultChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n int32_t ChannelLayoutId);\n};\n\nclass AVUtilConfigurationLength\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfigurationLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilConfiguration : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfiguration(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t ConfigPtr,\n uint32_t ConfigLen);\n};\n\nclass AVUtilLicenseLength : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicenseLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilLicense : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicense(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t LicensePtr,\n uint32_t LicenseLen);\n};\n\n} // namespace AVUtil\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", + "chunks": [ + { + "chunk_id": "doc_57_chunk_0", + "original_index": 0, + "content": "#pragma once\n#include \"avutil_base.h\"\n\n#include \"runtime/callingframe.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVUtil {\n\nclass AVLogSetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t LogLevelId);\n};\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVLogSetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t FlagsId);\n};\n\n" + }, + { + "chunk_id": "doc_57_chunk_1", + "original_index": 1, + "content": "class AVLogGetFlags : public 
WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n// Option funcs.\nclass AVOptSetBin : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetBin(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSet : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSet(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n" + }, + { + "chunk_id": "doc_57_chunk_2", + "original_index": 2, + "content": "class AVOptSetDouble : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetDouble(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetImageSize : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetImageSize(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n" + }, + { + "chunk_id": "doc_57_chunk_3", + "original_index": 3, + "content": "class AVOptSetPixelFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetPixelFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetSampleFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetSampleFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n" + }, + { + "chunk_id": "doc_57_chunk_4", + "original_index": 4, + "content": "class AVOptSetChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVRescaleQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, int32_t CDen);\n};\n\nclass AVRescaleQRnd : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQRnd(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &, int64_t A, int32_t BNum,\n int32_t BDen, int32_t CNum, int32_t CDen,\n int32_t RoundingId);\n};\n\n" + }, + { + "chunk_id": "doc_57_chunk_5", + "original_index": 5, + "content": "class AVUtilVersion : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilVersion(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &);\n};\n\nclass AVGetChannelLayoutNbChannels\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNbChannels(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutNameLen\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNameLen(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n" + }, + { + 
"chunk_id": "doc_57_chunk_6", + "original_index": 6, + "content": "class AVGetChannelLayoutName\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutName(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId, uint32_t NamePtr,\n uint32_t NameLen);\n};\n\nclass AVGetChannelLayoutMask\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutMask(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n" + }, + { + "chunk_id": "doc_57_chunk_7", + "original_index": 7, + "content": "class AVGetDefaultChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetDefaultChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n int32_t ChannelLayoutId);\n};\n\nclass AVUtilConfigurationLength\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfigurationLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n" + }, + { + "chunk_id": "doc_57_chunk_8", + "original_index": 8, + "content": "class AVUtilConfiguration : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfiguration(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t ConfigPtr,\n uint32_t ConfigLen);\n};\n\nclass AVUtilLicenseLength : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicenseLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilLicense : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicense(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t LicensePtr,\n uint32_t LicenseLen);\n};\n\n} // namespace AVUtil\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n" + } + ] + }, + { + "doc_id": "doc_58", + "original_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", + "chunks": [ + { + "chunk_id": "doc_58_chunk_0", + "original_index": 0, + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge 
Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n" + }, + { + "chunk_id": "doc_58_chunk_1", + "original_index": 1, + "content": " Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n" + } + ] + }, + { + "doc_id": "doc_59", + "original_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", + "content": "#include \"avChapter.h\"\n\nextern \"C\" {\n#include \"libavformat/avformat.h\"\n}\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVFormat {\n\nExpect AVChapterId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId, uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->id;\n}\n\nExpect AVChapterSetId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t ChapterId) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->id = ChapterId;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterTimebase::body(const Runtime::CallingFrame &Frame,\n uint32_t NumPtr, uint32_t DenPtr,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(Num, MemInst, int32_t, NumPtr, \"\");\n MEM_PTR_CHECK(Den, MemInst, int32_t, DenPtr, \"\");\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n AVRational const AvRational = static_cast(*AvChapter)->time_base;\n *Num = AvRational.num;\n *Den = AvRational.den;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetTimebase::body(const Runtime::CallingFrame &,\n int32_t Num, int32_t Den,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVRational const Timebase = av_make_q(Num, Den);\n\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->time_base = Timebase;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = 
AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->start;\n}\n\nExpect AVChapterSetStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n int64_t StartValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->start = StartValue;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->end;\n}\n\nExpect AVChapterSetEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t EndValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->end = EndValue;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterMetadata::body(const Runtime::CallingFrame &Frame,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, uint32_t DictPtr) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(DictId, MemInst, uint32_t, DictPtr,\n \"Failed when accessing the return AVDictionary memory\"sv);\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n\n AVDictionary **AvDictionary =\n static_cast(av_malloc(sizeof(AVDictionary *)));\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n *AvDictionary = (*AvChapter)->metadata;\n FFMPEG_PTR_STORE(AvDictionary, DictId);\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetMetadata::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n uint32_t DictId) {\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n FFMPEG_PTR_FETCH(AvDictionary, DictId, AVDictionary *);\n\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n if (AvDictionary == nullptr)\n (*AvChapter)->metadata = nullptr;\n else\n (*AvChapter)->metadata = *AvDictionary;\n return static_cast(ErrNo::Success);\n}\n\n} // namespace AVFormat\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", + "chunks": [ + { + "chunk_id": "doc_59_chunk_0", + "original_index": 0, + "content": "#include \"avChapter.h\"\n\nextern \"C\" {\n#include \"libavformat/avformat.h\"\n}\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVFormat {\n\nExpect AVChapterId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId, uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n 
AvChapter++;\n\n return static_cast(*AvChapter)->id;\n}\n\nExpect AVChapterSetId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t ChapterId) {\n\n" + }, + { + "chunk_id": "doc_59_chunk_1", + "original_index": 1, + "content": " FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->id = ChapterId;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterTimebase::body(const Runtime::CallingFrame &Frame,\n uint32_t NumPtr, uint32_t DenPtr,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(Num, MemInst, int32_t, NumPtr, \"\");\n MEM_PTR_CHECK(Den, MemInst, int32_t, DenPtr, \"\");\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n" + }, + { + "chunk_id": "doc_59_chunk_2", + "original_index": 2, + "content": " AVRational const AvRational = static_cast(*AvChapter)->time_base;\n *Num = AvRational.num;\n *Den = AvRational.den;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetTimebase::body(const Runtime::CallingFrame &,\n int32_t Num, int32_t Den,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVRational const Timebase = av_make_q(Num, Den);\n\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->time_base = Timebase;\n return static_cast(ErrNo::Success);\n}\n\n" + }, + { + "chunk_id": "doc_59_chunk_3", + "original_index": 3, + "content": "Expect AVChapterStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->start;\n}\n\n" + }, + { + "chunk_id": "doc_59_chunk_4", + "original_index": 4, + "content": "Expect AVChapterSetStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n int64_t StartValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->start = StartValue;\n return static_cast(ErrNo::Success);\n}\n\n" + }, + { + "chunk_id": "doc_59_chunk_5", + "original_index": 5, + "content": "Expect AVChapterEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->end;\n}\n\n" + }, + { + "chunk_id": "doc_59_chunk_6", + "original_index": 6, + "content": "Expect AVChapterSetEnd::body(const Runtime::CallingFrame &,\n 
uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t EndValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->end = EndValue;\n return static_cast(ErrNo::Success);\n}\n\n" + }, + { + "chunk_id": "doc_59_chunk_7", + "original_index": 7, + "content": "Expect AVChapterMetadata::body(const Runtime::CallingFrame &Frame,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, uint32_t DictPtr) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(DictId, MemInst, uint32_t, DictPtr,\n \"Failed when accessing the return AVDictionary memory\"sv);\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n\n AVDictionary **AvDictionary =\n static_cast(av_malloc(sizeof(AVDictionary *)));\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n" + }, + { + "chunk_id": "doc_59_chunk_8", + "original_index": 8, + "content": " // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n *AvDictionary = (*AvChapter)->metadata;\n FFMPEG_PTR_STORE(AvDictionary, DictId);\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetMetadata::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n uint32_t DictId) {\n\n" + }, + { + "chunk_id": "doc_59_chunk_9", + "original_index": 9, + "content": " FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n FFMPEG_PTR_FETCH(AvDictionary, DictId, AVDictionary *);\n\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n if (AvDictionary == nullptr)\n (*AvChapter)->metadata = nullptr;\n else\n (*AvChapter)->metadata = *AvDictionary;\n return static_cast(ErrNo::Success);\n}\n\n} // namespace AVFormat\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n" + } + ] + }, + { + "doc_id": "doc_60", + "original_uuid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26", + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"asymmetric_common/publickey.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace AsymmetricCommon {\n\nWasiCryptoExpect\nimportPk(AsymmetricCommon::Algorithm Alg, Span Encoded,\n __wasi_publickey_encoding_e_t Encoding) noexcept {\n return std::visit(\n [=](auto Factory) noexcept -> WasiCryptoExpect {\n return decltype(Factory)::PublicKey::import(Encoded, Encoding);\n },\n Alg);\n}\n\nWasiCryptoExpect>\npkExportData(const PkVariant &PkVariant,\n __wasi_publickey_encoding_e_t Encoding) noexcept {\n return std::visit(\n [Encoding](const auto &Pk) noexcept { return Pk.exportData(Encoding); },\n PkVariant);\n}\n\nWasiCryptoExpect pkVerify(const PkVariant &PkVariant) noexcept {\n return std::visit([](const auto &Pk) noexcept { return Pk.verify(); },\n PkVariant);\n}\n\n} // namespace AsymmetricCommon\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", + "chunks": [ + { + "chunk_id": "doc_60_chunk_0", + "original_index": 0, + "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"asymmetric_common/publickey.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace AsymmetricCommon 
{\n\nWasiCryptoExpect\nimportPk(AsymmetricCommon::Algorithm Alg, Span Encoded,\n         __wasi_publickey_encoding_e_t Encoding) noexcept {\n  return std::visit(\n      [=](auto Factory) noexcept -> WasiCryptoExpect {\n        return decltype(Factory)::PublicKey::import(Encoded, Encoding);\n      },\n      Alg);\n}\n\n" + }, + { + "chunk_id": "doc_60_chunk_1", + "original_index": 1, + "content": "WasiCryptoExpect>\npkExportData(const PkVariant &PkVariant,\n             __wasi_publickey_encoding_e_t Encoding) noexcept {\n  return std::visit(\n      [Encoding](const auto &Pk) noexcept { return Pk.exportData(Encoding); },\n      PkVariant);\n}\n\nWasiCryptoExpect pkVerify(const PkVariant &PkVariant) noexcept {\n  return std::visit([](const auto &Pk) noexcept { return Pk.verify(); },\n                    PkVariant);\n}\n\n} // namespace AsymmetricCommon\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n" + } + ] + }, + { + "doc_id": "doc_61", + "original_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", + "content": "use std::ffi::OsStr;\nuse std::io::{self, Error, ErrorKind, Result};\nuse std::iter::once;\nuse std::os::windows::ffi::OsStrExt;\nuse std::sync::mpsc::TryRecvError;\nuse std::sync::Arc;\n\nuse crate::event::{OnResize, WindowSize};\nuse crate::tty::windows::child::ChildExitWatcher;\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options, Shell};\n\nmod blocking;\nmod child;\nmod conpty;\n\nuse blocking::{UnblockedReader, UnblockedWriter};\nuse conpty::Conpty as Backend;\nuse miow::pipe::{AnonRead, AnonWrite};\nuse polling::{Event, Poller};\n\npub const PTY_CHILD_EVENT_TOKEN: usize = 1;\npub const PTY_READ_WRITE_TOKEN: usize = 2;\n\ntype ReadPipe = UnblockedReader<AnonRead>;\ntype WritePipe = UnblockedWriter<AnonWrite>;\n\npub struct Pty {\n    // XXX: Backend is required to be the first field, to ensure correct drop order. 
Dropping\n    // `conout` before `backend` will cause a deadlock (with Conpty).\n    backend: Backend,\n    conout: ReadPipe,\n    conin: WritePipe,\n    child_watcher: ChildExitWatcher,\n}\n\npub fn new(config: &Options, window_size: WindowSize, _window_id: u64) -> Result<Pty> {\n    conpty::new(config, window_size)\n        .ok_or_else(|| Error::new(ErrorKind::Other, \"failed to spawn conpty\"))\n}\n\nimpl Pty {\n    fn new(\n        backend: impl Into<Backend>,\n        conout: impl Into<ReadPipe>,\n        conin: impl Into<WritePipe>,\n        child_watcher: ChildExitWatcher,\n    ) -> Self {\n        Self { backend: backend.into(), conout: conout.into(), conin: conin.into(), child_watcher }\n    }\n\n    pub fn child_watcher(&self) -> &ChildExitWatcher {\n        &self.child_watcher\n    }\n}\n\nfn with_key(mut event: Event, key: usize) -> Event {\n    event.key = key;\n    event\n}\n\nimpl EventedReadWrite for Pty {\n    type Reader = ReadPipe;\n    type Writer = WritePipe;\n\n    #[inline]\n    unsafe fn register(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n        Ok(())\n    }\n\n    #[inline]\n    fn reregister(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n        Ok(())\n    }\n\n    #[inline]\n    fn deregister(&mut self, _poll: &Arc<Poller>) -> io::Result<()> {\n        self.conin.deregister();\n        self.conout.deregister();\n        self.child_watcher.deregister();\n\n        Ok(())\n    }\n\n    #[inline]\n    fn reader(&mut self) -> &mut Self::Reader {\n        &mut self.conout\n    }\n\n    #[inline]\n    fn writer(&mut self) -> &mut Self::Writer {\n        &mut self.conin\n    }\n}\n\nimpl EventedPty for Pty {\n    fn next_child_event(&mut self) -> Option<ChildEvent> {\n        match self.child_watcher.event_rx().try_recv() {\n            Ok(ev) => Some(ev),\n            Err(TryRecvError::Empty) => None,\n            Err(TryRecvError::Disconnected) => Some(ChildEvent::Exited(None)),\n        }\n    }\n}\n\nimpl OnResize for Pty {\n    fn on_resize(&mut self, window_size: WindowSize) {\n        self.backend.on_resize(window_size)\n    }\n}\n\nfn cmdline(config: &Options) -> String {\n    let default_shell = Shell::new(\"powershell\".to_owned(), Vec::new());\n    let shell = config.shell.as_ref().unwrap_or(&default_shell);\n\n    once(shell.program.as_str())\n        .chain(shell.args.iter().map(|s| s.as_str()))\n        .collect::<Vec<_>>()\n        .join(\" \")\n}\n\n/// Converts the string slice into a Windows-standard representation for \"W\"-\n/// suffixed function variants, which accept UTF-16 encoded string values.\npub fn win32_string<S: AsRef<OsStr> + ?Sized>(value: &S) -> Vec<u16> {\n    OsStr::new(value).encode_wide().chain(once(0)).collect()\n}\n", + "chunks": [ + { + "chunk_id": "doc_61_chunk_0", + "original_index": 0, + "content": "use std::ffi::OsStr;\nuse std::io::{self, Error, ErrorKind, Result};\nuse std::iter::once;\nuse std::os::windows::ffi::OsStrExt;\nuse std::sync::mpsc::TryRecvError;\nuse std::sync::Arc;\n\nuse crate::event::{OnResize, WindowSize};\nuse crate::tty::windows::child::ChildExitWatcher;\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options, Shell};\n\nmod blocking;\nmod child;\nmod conpty;\n\nuse blocking::{UnblockedReader, UnblockedWriter};\nuse conpty::Conpty as 
Backend;\nuse miow::pipe::{AnonRead, AnonWrite};\nuse polling::{Event, Poller};\n\npub const PTY_CHILD_EVENT_TOKEN: usize = 1;\npub const PTY_READ_WRITE_TOKEN: usize = 2;\n\ntype ReadPipe = UnblockedReader<AnonRead>;\ntype WritePipe = UnblockedWriter<AnonWrite>;\n\n" + }, + { + "chunk_id": "doc_61_chunk_1", + "original_index": 1, + "content": "pub struct Pty {\n    // XXX: Backend is required to be the first field, to ensure correct drop order. Dropping\n    // `conout` before `backend` will cause a deadlock (with Conpty).\n    backend: Backend,\n    conout: ReadPipe,\n    conin: WritePipe,\n    child_watcher: ChildExitWatcher,\n}\n\npub fn new(config: &Options, window_size: WindowSize, _window_id: u64) -> Result<Pty> {\n    conpty::new(config, window_size)\n        .ok_or_else(|| Error::new(ErrorKind::Other, \"failed to spawn conpty\"))\n}\n\n" + }, + { + "chunk_id": "doc_61_chunk_2", + "original_index": 2, + "content": "impl Pty {\n    fn new(\n        backend: impl Into<Backend>,\n        conout: impl Into<ReadPipe>,\n        conin: impl Into<WritePipe>,\n        child_watcher: ChildExitWatcher,\n    ) -> Self {\n        Self { backend: backend.into(), conout: conout.into(), conin: conin.into(), child_watcher }\n    }\n\n    pub fn child_watcher(&self) -> &ChildExitWatcher {\n        &self.child_watcher\n    }\n}\n\nfn with_key(mut event: Event, key: usize) -> Event {\n    event.key = key;\n    event\n}\n\nimpl EventedReadWrite for Pty {\n    type Reader = ReadPipe;\n    type Writer = WritePipe;\n\n" + }, + { + "chunk_id": "doc_61_chunk_3", + "original_index": 3, + "content": "    #[inline]\n    unsafe fn register(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n" + }, + { + "chunk_id": "doc_61_chunk_4", + "original_index": 4, + "content": "        Ok(())\n    }\n\n    #[inline]\n    fn reregister(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n" + }, + { + "chunk_id": "doc_61_chunk_5", + "original_index": 5, + "content": "        Ok(())\n    }\n\n    #[inline]\n    fn deregister(&mut self, _poll: &Arc<Poller>) -> io::Result<()> {\n        self.conin.deregister();\n        self.conout.deregister();\n        self.child_watcher.deregister();\n\n        Ok(())\n    }\n\n    #[inline]\n    fn reader(&mut self) -> &mut Self::Reader {\n        &mut self.conout\n    }\n\n    #[inline]\n    fn writer(&mut self) -> &mut Self::Writer {\n        &mut self.conin\n    }\n}\n\n" + }, + { + "chunk_id": "doc_61_chunk_6", + "original_index": 6, + "content": "impl EventedPty for Pty {\n    fn next_child_event(&mut self) -> Option<ChildEvent> {\n        match self.child_watcher.event_rx().try_recv() {\n            Ok(ev) => Some(ev),\n            Err(TryRecvError::Empty) => None,\n            Err(TryRecvError::Disconnected) => Some(ChildEvent::Exited(None)),\n        }\n    }\n}\n\nimpl OnResize for Pty {\n    fn on_resize(&mut self, window_size: WindowSize) {\n        self.backend.on_resize(window_size)\n    }\n}\n\nfn cmdline(config: &Options) -> String {\n    let default_shell = Shell::new(\"powershell\".to_owned(), Vec::new());\n    let shell = config.shell.as_ref().unwrap_or(&default_shell);\n\n" + }, + { + "chunk_id": "doc_61_chunk_7", + "original_index": 7, + "content": "    once(shell.program.as_str())\n        
.chain(shell.args.iter().map(|s| s.as_str()))\n        .collect::<Vec<_>>()\n        .join(\" \")\n}\n\n/// Converts the string slice into a Windows-standard representation for \"W\"-\n/// suffixed function variants, which accept UTF-16 encoded string values.\npub fn win32_string<S: AsRef<OsStr> + ?Sized>(value: &S) -> Vec<u16> {\n    OsStr::new(value).encode_wide().chain(once(0)).collect()\n}\n" + } + ] + }, + { + "doc_id": "doc_62", + "original_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", + "content": "//! Serde helpers.\n\nuse toml::{Table, Value};\n\n/// Merge two serde structures.\n///\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n/// `replacement`.\npub fn merge(base: Value, replacement: Value) -> Value {\n    match (base, replacement) {\n        (Value::Array(mut base), Value::Array(mut replacement)) => {\n            base.append(&mut replacement);\n            Value::Array(base)\n        },\n        (Value::Table(base), Value::Table(replacement)) => {\n            Value::Table(merge_tables(base, replacement))\n        },\n        (_, value) => value,\n    }\n}\n\n/// Merge two key/value tables.\nfn merge_tables(mut base: Table, replacement: Table) -> Table {\n    for (key, value) in replacement {\n        let value = match base.remove(&key) {\n            Some(base_value) => merge(base_value, value),\n            None => value,\n        };\n        base.insert(key, value);\n    }\n\n    base\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn merge_primitive() {\n        let base = Value::Table(Table::new());\n        let replacement = Value::Boolean(true);\n        assert_eq!(merge(base, replacement.clone()), replacement);\n\n        let base = Value::Boolean(false);\n        let replacement = Value::Boolean(true);\n        assert_eq!(merge(base, replacement.clone()), replacement);\n\n        let base = Value::Integer(0.into());\n        let replacement = Value::Integer(1.into());\n        assert_eq!(merge(base, replacement.clone()), replacement);\n\n        let base = Value::String(String::new());\n        let replacement = Value::String(String::from(\"test\"));\n        assert_eq!(merge(base, replacement.clone()), replacement);\n\n        let base = Value::Table(Table::new());\n        let replacement = Value::Table(Table::new());\n        assert_eq!(merge(base.clone(), replacement), base);\n    }\n\n    #[test]\n    fn merge_sequence() {\n        let base = Value::Array(vec![Value::Table(Table::new())]);\n        let replacement = Value::Array(vec![Value::Boolean(true)]);\n        let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);\n        assert_eq!(merge(base, replacement), expected);\n    }\n\n    #[test]\n    fn merge_tables() {\n        let mut base_table = Table::new();\n        base_table.insert(String::from(\"a\"), Value::Boolean(true));\n        base_table.insert(String::from(\"b\"), Value::Boolean(false));\n        let base = Value::Table(base_table);\n\n        let mut replacement_table = Table::new();\n        replacement_table.insert(String::from(\"a\"), Value::Boolean(true));\n        replacement_table.insert(String::from(\"c\"), Value::Boolean(false));\n        let replacement = Value::Table(replacement_table);\n\n        let merged = merge(base, replacement);\n\n        let mut expected_table = Table::new();\n        expected_table.insert(String::from(\"b\"), Value::Boolean(false));\n        expected_table.insert(String::from(\"a\"), Value::Boolean(true));\n        expected_table.insert(String::from(\"c\"), Value::Boolean(false));\n        let expected = Value::Table(expected_table);\n\n        assert_eq!(merged, expected);\n    }\n}\n", + "chunks": [ + { + "chunk_id": "doc_62_chunk_0", + "original_index": 0, + "content": "//! 
Serde helpers.\n\nuse toml::{Table, Value};\n\n/// Merge two serde structures.\n///\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n/// `replacement`.\npub fn merge(base: Value, replacement: Value) -> Value {\n match (base, replacement) {\n (Value::Array(mut base), Value::Array(mut replacement)) => {\n base.append(&mut replacement);\n Value::Array(base)\n },\n (Value::Table(base), Value::Table(replacement)) => {\n Value::Table(merge_tables(base, replacement))\n },\n (_, value) => value,\n }\n}\n\n" + }, + { + "chunk_id": "doc_62_chunk_1", + "original_index": 1, + "content": "/// Merge two key/value tables.\nfn merge_tables(mut base: Table, replacement: Table) -> Table {\n for (key, value) in replacement {\n let value = match base.remove(&key) {\n Some(base_value) => merge(base_value, value),\n None => value,\n };\n base.insert(key, value);\n }\n\n base\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn merge_primitive() {\n let base = Value::Table(Table::new());\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n" + }, + { + "chunk_id": "doc_62_chunk_2", + "original_index": 2, + "content": " let base = Value::Boolean(false);\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Integer(0.into());\n let replacement = Value::Integer(1.into());\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::String(String::new());\n let replacement = Value::String(String::from(\"test\"));\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Table(Table::new());\n let replacement = Value::Table(Table::new());\n assert_eq!(merge(base.clone(), replacement), base);\n }\n\n" + }, + { + "chunk_id": "doc_62_chunk_3", + "original_index": 3, + "content": " #[test]\n fn merge_sequence() {\n let base = Value::Array(vec![Value::Table(Table::new())]);\n let replacement = Value::Array(vec![Value::Boolean(true)]);\n let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);\n assert_eq!(merge(base, replacement), expected);\n }\n\n #[test]\n fn merge_tables() {\n let mut base_table = Table::new();\n base_table.insert(String::from(\"a\"), Value::Boolean(true));\n base_table.insert(String::from(\"b\"), Value::Boolean(false));\n let base = Value::Table(base_table);\n\n let mut replacement_table = Table::new();\n replacement_table.insert(String::from(\"a\"), Value::Boolean(true));\n replacement_table.insert(String::from(\"c\"), Value::Boolean(false));\n let replacement = Value::Table(replacement_table);\n\n let merged = merge(base, replacement);\n\n" + }, + { + "chunk_id": "doc_62_chunk_4", + "original_index": 4, + "content": " let mut expected_table = Table::new();\n expected_table.insert(String::from(\"b\"), Value::Boolean(false));\n expected_table.insert(String::from(\"a\"), Value::Boolean(true));\n expected_table.insert(String::from(\"c\"), Value::Boolean(false));\n let expected = Value::Table(expected_table);\n\n assert_eq!(merged, expected);\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_63", + "original_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", + "content": "//! 
TTY related functionality.\n\nuse std::ffi::CStr;\nuse std::fs::File;\nuse std::io::{Error, ErrorKind, Read, Result};\nuse std::mem::MaybeUninit;\nuse std::os::unix::io::{AsRawFd, FromRawFd};\nuse std::os::unix::net::UnixStream;\nuse std::os::unix::process::CommandExt;\nuse std::process::{Child, Command, Stdio};\nuse std::sync::Arc;\nuse std::{env, ptr};\n\nuse libc::{c_int, TIOCSCTTY};\nuse log::error;\nuse polling::{Event, PollMode, Poller};\nuse rustix_openpty::openpty;\nuse rustix_openpty::rustix::termios::Winsize;\n#[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\nuse rustix_openpty::rustix::termios::{self, InputModes, OptionalActions};\nuse signal_hook::consts as sigconsts;\nuse signal_hook::low_level::pipe as signal_pipe;\n\nuse crate::event::{OnResize, WindowSize};\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options};\n\n// Interest in PTY read/writes.\npub(crate) const PTY_READ_WRITE_TOKEN: usize = 0;\n\n// Interest in new child events.\npub(crate) const PTY_CHILD_EVENT_TOKEN: usize = 1;\n\nmacro_rules! die {\n    ($($arg:tt)*) => {{\n        error!($($arg)*);\n        std::process::exit(1);\n    }}\n}\n\n/// Really only needed on BSD, but should be fine elsewhere.\nfn set_controlling_terminal(fd: c_int) {\n    let res = unsafe {\n        // TIOSCTTY changes based on platform and the `ioctl` call is different\n        // based on architecture (32/64). So a generic cast is used to make sure\n        // there are no issues. To allow such a generic cast the clippy warning\n        // is disabled.\n        #[allow(clippy::cast_lossless)]\n        libc::ioctl(fd, TIOCSCTTY as _, 0)\n    };\n\n    if res < 0 {\n        die!(\"ioctl TIOCSCTTY failed: {}\", Error::last_os_error());\n    }\n}\n\n#[derive(Debug)]\nstruct Passwd<'a> {\n    name: &'a str,\n    dir: &'a str,\n    shell: &'a str,\n}\n\n/// Return a Passwd struct with pointers into the provided buf.\n///\n/// # Unsafety\n///\n/// If `buf` is changed while `Passwd` is alive, bad thing will almost certainly happen.\nfn get_pw_entry(buf: &mut [i8; 1024]) -> Result<Passwd<'_>> {\n    // Create zeroed passwd struct.\n    let mut entry: MaybeUninit<libc::passwd> = MaybeUninit::uninit();\n\n    let mut res: *mut libc::passwd = ptr::null_mut();\n\n    // Try and read the pw file.\n    let uid = unsafe { libc::getuid() };\n    let status = unsafe {\n        libc::getpwuid_r(uid, entry.as_mut_ptr(), buf.as_mut_ptr() as *mut _, buf.len(), &mut res)\n    };\n    let entry = unsafe { entry.assume_init() };\n\n    if status < 0 {\n        return Err(Error::new(ErrorKind::Other, \"getpwuid_r failed\"));\n    }\n\n    if res.is_null() {\n        return Err(Error::new(ErrorKind::Other, \"pw not found\"));\n    }\n\n    // Sanity check.\n    assert_eq!(entry.pw_uid, uid);\n\n    // Build a borrowed Passwd struct.\n    Ok(Passwd {\n        name: unsafe { CStr::from_ptr(entry.pw_name).to_str().unwrap() },\n        dir: unsafe { CStr::from_ptr(entry.pw_dir).to_str().unwrap() },\n        shell: unsafe { CStr::from_ptr(entry.pw_shell).to_str().unwrap() },\n    })\n}\n\npub struct Pty {\n    child: Child,\n    file: File,\n    signals: UnixStream,\n}\n\nimpl Pty {\n    pub fn child(&self) -> &Child {\n        &self.child\n    }\n\n    pub fn file(&self) -> &File {\n        &self.file\n    }\n}\n\n/// User information that is required for a new shell session.\nstruct ShellUser {\n    user: String,\n    home: String,\n    shell: String,\n}\n\nimpl ShellUser {\n    /// look for shell, username, longname, and home dir in the respective environment variables\n    /// before falling back on looking in to `passwd`.\n    fn from_env() -> Result<Self> {\n        let mut buf = [0; 1024];\n        let pw = get_pw_entry(&mut buf);\n\n        let user = match env::var(\"USER\") {\n            Ok(user) => user,\n            Err(_) => match pw 
{\n                Ok(ref pw) => pw.name.to_owned(),\n                Err(err) => return Err(err),\n            },\n        };\n\n        let home = match env::var(\"HOME\") {\n            Ok(home) => home,\n            Err(_) => match pw {\n                Ok(ref pw) => pw.dir.to_owned(),\n                Err(err) => return Err(err),\n            },\n        };\n\n        let shell = match env::var(\"SHELL\") {\n            Ok(shell) => shell,\n            Err(_) => match pw {\n                Ok(ref pw) => pw.shell.to_owned(),\n                Err(err) => return Err(err),\n            },\n        };\n\n        Ok(Self { user, home, shell })\n    }\n}\n\n#[cfg(not(target_os = \"macos\"))]\nfn default_shell_command(shell: &str, _user: &str) -> Command {\n    Command::new(shell)\n}\n\n#[cfg(target_os = \"macos\")]\nfn default_shell_command(shell: &str, user: &str) -> Command {\n    let shell_name = shell.rsplit('/').next().unwrap();\n\n    // On macOS, use the `login` command so the shell will appear as a tty session.\n    let mut login_command = Command::new(\"/usr/bin/login\");\n\n    // Exec the shell with argv[0] prepended by '-' so it becomes a login shell.\n    // `login` normally does this itself, but `-l` disables this.\n    let exec = format!(\"exec -a -{} {}\", shell_name, shell);\n\n    // -f: Bypasses authentication for the already-logged-in user.\n    // -l: Skips changing directory to $HOME and prepending '-' to argv[0].\n    // -p: Preserves the environment.\n    //\n    // XXX: we use zsh here over sh due to `exec -a`.\n    login_command.args([\"-flp\", user, \"/bin/zsh\", \"-c\", &exec]);\n    login_command\n}\n\n/// Create a new TTY and return a handle to interact with it.\npub fn new(config: &Options, window_size: WindowSize, window_id: u64) -> Result<Pty> {\n    let pty = openpty(None, Some(&window_size.to_winsize()))?;\n    let (master, slave) = (pty.controller, pty.user);\n    let master_fd = master.as_raw_fd();\n    let slave_fd = slave.as_raw_fd();\n\n    #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n    if let Ok(mut termios) = termios::tcgetattr(&master) {\n        // Set character encoding to UTF-8.\n        termios.input_modes.set(InputModes::IUTF8, true);\n        let _ = termios::tcsetattr(&master, OptionalActions::Now, &termios);\n    }\n\n    let user = ShellUser::from_env()?;\n\n    let mut builder = if let Some(shell) = config.shell.as_ref() {\n        let mut cmd = Command::new(&shell.program);\n        cmd.args(shell.args.as_slice());\n        cmd\n    } else {\n        default_shell_command(&user.shell, &user.user)\n    };\n\n    // Setup child stdin/stdout/stderr as slave fd of PTY.\n    // Ownership of fd is transferred to the Stdio structs and will be closed by them at the end of\n    // this scope. 
(It is not an issue that the fd is closed three times since File::drop ignores\n // error on libc::close.).\n builder.stdin(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stderr(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stdout(unsafe { Stdio::from_raw_fd(slave_fd) });\n\n // Setup shell environment.\n let window_id = window_id.to_string();\n builder.env(\"ALACRITTY_WINDOW_ID\", &window_id);\n builder.env(\"USER\", user.user);\n builder.env(\"HOME\", user.home);\n // Set Window ID for clients relying on X11 hacks.\n builder.env(\"WINDOWID\", window_id);\n for (key, value) in &config.env {\n builder.env(key, value);\n }\n\n unsafe {\n builder.pre_exec(move || {\n // Create a new process group.\n let err = libc::setsid();\n if err == -1 {\n return Err(Error::new(ErrorKind::Other, \"Failed to set session id\"));\n }\n\n set_controlling_terminal(slave_fd);\n\n // No longer need slave/master fds.\n libc::close(slave_fd);\n libc::close(master_fd);\n\n libc::signal(libc::SIGCHLD, libc::SIG_DFL);\n libc::signal(libc::SIGHUP, libc::SIG_DFL);\n libc::signal(libc::SIGINT, libc::SIG_DFL);\n libc::signal(libc::SIGQUIT, libc::SIG_DFL);\n libc::signal(libc::SIGTERM, libc::SIG_DFL);\n libc::signal(libc::SIGALRM, libc::SIG_DFL);\n\n Ok(())\n });\n }\n\n // Handle set working directory option.\n if let Some(dir) = &config.working_directory {\n builder.current_dir(dir);\n }\n\n // Prepare signal handling before spawning child.\n let signals = {\n let (sender, recv) = UnixStream::pair()?;\n\n // Register the recv end of the pipe for SIGCHLD.\n signal_pipe::register(sigconsts::SIGCHLD, sender)?;\n recv.set_nonblocking(true)?;\n recv\n };\n\n match builder.spawn() {\n Ok(child) => {\n unsafe {\n // Maybe this should be done outside of this function so nonblocking\n // isn't forced upon consumers. 
Although maybe it should be?\n                set_nonblocking(master_fd);\n            }\n\n            Ok(Pty { child, file: File::from(master), signals })\n        },\n        Err(err) => Err(Error::new(\n            err.kind(),\n            format!(\n                \"Failed to spawn command '{}': {}\",\n                builder.get_program().to_string_lossy(),\n                err\n            ),\n        )),\n    }\n}\n\nimpl Drop for Pty {\n    fn drop(&mut self) {\n        // Make sure the PTY is terminated properly.\n        unsafe {\n            libc::kill(self.child.id() as i32, libc::SIGHUP);\n        }\n        let _ = self.child.wait();\n    }\n}\n\nimpl EventedReadWrite for Pty {\n    type Reader = File;\n    type Writer = File;\n\n    #[inline]\n    unsafe fn register(\n        &mut self,\n        poll: &Arc<Poller>,\n        mut interest: Event,\n        poll_opts: PollMode,\n    ) -> Result<()> {\n        interest.key = PTY_READ_WRITE_TOKEN;\n        unsafe {\n            poll.add_with_mode(&self.file, interest, poll_opts)?;\n        }\n\n        unsafe {\n            poll.add_with_mode(\n                &self.signals,\n                Event::readable(PTY_CHILD_EVENT_TOKEN),\n                PollMode::Level,\n            )\n        }\n    }\n\n    #[inline]\n    fn reregister(\n        &mut self,\n        poll: &Arc<Poller>,\n        mut interest: Event,\n        poll_opts: PollMode,\n    ) -> Result<()> {\n        interest.key = PTY_READ_WRITE_TOKEN;\n        poll.modify_with_mode(&self.file, interest, poll_opts)?;\n\n        poll.modify_with_mode(\n            &self.signals,\n            Event::readable(PTY_CHILD_EVENT_TOKEN),\n            PollMode::Level,\n        )\n    }\n\n    #[inline]\n    fn deregister(&mut self, poll: &Arc<Poller>) -> Result<()> {\n        poll.delete(&self.file)?;\n        poll.delete(&self.signals)\n    }\n\n    #[inline]\n    fn reader(&mut self) -> &mut File {\n        &mut self.file\n    }\n\n    #[inline]\n    fn writer(&mut self) -> &mut File {\n        &mut self.file\n    }\n}\n\nimpl EventedPty for Pty {\n    #[inline]\n    fn next_child_event(&mut self) -> Option<ChildEvent> {\n        // See if there has been a SIGCHLD.\n        let mut buf = [0u8; 1];\n        if let Err(err) = self.signals.read(&mut buf) {\n            if err.kind() != ErrorKind::WouldBlock {\n                error!(\"Error reading from signal pipe: {}\", err);\n            }\n            return None;\n        }\n\n        // Match on the child process.\n        match self.child.try_wait() {\n            Err(err) => {\n                error!(\"Error checking child process termination: {}\", err);\n                None\n            },\n            Ok(None) => None,\n            Ok(exit_status) => Some(ChildEvent::Exited(exit_status.and_then(|s| s.code()))),\n        }\n    }\n}\n\nimpl OnResize for Pty {\n    /// Resize the PTY.\n    ///\n    /// Tells the kernel that the window size changed with the new pixel\n    /// dimensions and line/column counts.\n    fn on_resize(&mut self, window_size: WindowSize) {\n        let win = window_size.to_winsize();\n\n        let res = unsafe { libc::ioctl(self.file.as_raw_fd(), libc::TIOCSWINSZ, &win as *const _) };\n\n        if res < 0 {\n            die!(\"ioctl TIOCSWINSZ failed: {}\", Error::last_os_error());\n        }\n    }\n}\n\n/// Types that can produce a `Winsize`.\npub trait ToWinsize {\n    /// Get a `Winsize`.\n    fn to_winsize(self) -> Winsize;\n}\n\nimpl ToWinsize for WindowSize {\n    fn to_winsize(self) -> Winsize {\n        let ws_row = self.num_lines as libc::c_ushort;\n        let ws_col = self.num_cols as libc::c_ushort;\n\n        let ws_xpixel = ws_col * self.cell_width as libc::c_ushort;\n        let ws_ypixel = ws_row * self.cell_height as libc::c_ushort;\n        Winsize { ws_row, ws_col, ws_xpixel, ws_ypixel }\n    }\n}\n\nunsafe fn set_nonblocking(fd: c_int) {\n    use libc::{fcntl, F_GETFL, F_SETFL, O_NONBLOCK};\n\n    let res = fcntl(fd, F_SETFL, fcntl(fd, F_GETFL, 0) | O_NONBLOCK);\n    assert_eq!(res, 0);\n}\n\n#[test]\nfn test_get_pw_entry() {\n    let mut buf: [i8; 1024] = [0; 1024];\n    let _pw = get_pw_entry(&mut buf).unwrap();\n}\n", + "chunks": [ + { + "chunk_id": "doc_63_chunk_0", + "original_index": 0, + "content": "//! 
TTY related functionality.\n\nuse std::ffi::CStr;\nuse std::fs::File;\nuse std::io::{Error, ErrorKind, Read, Result};\nuse std::mem::MaybeUninit;\nuse std::os::unix::io::{AsRawFd, FromRawFd};\nuse std::os::unix::net::UnixStream;\nuse std::os::unix::process::CommandExt;\nuse std::process::{Child, Command, Stdio};\nuse std::sync::Arc;\nuse std::{env, ptr};\n\nuse libc::{c_int, TIOCSCTTY};\nuse log::error;\nuse polling::{Event, PollMode, Poller};\nuse rustix_openpty::openpty;\nuse rustix_openpty::rustix::termios::Winsize;\n#[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\nuse rustix_openpty::rustix::termios::{self, InputModes, OptionalActions};\nuse signal_hook::consts as sigconsts;\nuse signal_hook::low_level::pipe as signal_pipe;\n\n" + }, + { + "chunk_id": "doc_63_chunk_1", + "original_index": 1, + "content": "use crate::event::{OnResize, WindowSize};\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options};\n\n// Interest in PTY read/writes.\npub(crate) const PTY_READ_WRITE_TOKEN: usize = 0;\n\n// Interest in new child events.\npub(crate) const PTY_CHILD_EVENT_TOKEN: usize = 1;\n\nmacro_rules! die {\n    ($($arg:tt)*) => {{\n        error!($($arg)*);\n        std::process::exit(1);\n    }}\n}\n\n/// Really only needed on BSD, but should be fine elsewhere.\nfn set_controlling_terminal(fd: c_int) {\n    let res = unsafe {\n        // TIOSCTTY changes based on platform and the `ioctl` call is different\n        // based on architecture (32/64). So a generic cast is used to make sure\n        // there are no issues. To allow such a generic cast the clippy warning\n        // is disabled.\n        #[allow(clippy::cast_lossless)]\n        libc::ioctl(fd, TIOCSCTTY as _, 0)\n    };\n\n" + }, + { + "chunk_id": "doc_63_chunk_2", + "original_index": 2, + "content": "    if res < 0 {\n        die!(\"ioctl TIOCSCTTY failed: {}\", Error::last_os_error());\n    }\n}\n\n#[derive(Debug)]\nstruct Passwd<'a> {\n    name: &'a str,\n    dir: &'a str,\n    shell: &'a str,\n}\n\n/// Return a Passwd struct with pointers into the provided buf.\n///\n/// # Unsafety\n///\n/// If `buf` is changed while `Passwd` is alive, bad thing will almost certainly happen.\nfn get_pw_entry(buf: &mut [i8; 1024]) -> Result<Passwd<'_>> {\n    // Create zeroed passwd struct.\n    let mut entry: MaybeUninit<libc::passwd> = MaybeUninit::uninit();\n\n    let mut res: *mut libc::passwd = ptr::null_mut();\n\n    // Try and read the pw file.\n    let uid = unsafe { libc::getuid() };\n    let status = unsafe {\n        libc::getpwuid_r(uid, entry.as_mut_ptr(), buf.as_mut_ptr() as *mut _, buf.len(), &mut res)\n    };\n    let entry = unsafe { entry.assume_init() };\n\n" + }, + { + "chunk_id": "doc_63_chunk_3", + "original_index": 3, + "content": "    if status < 0 {\n        return Err(Error::new(ErrorKind::Other, \"getpwuid_r failed\"));\n    }\n\n    if res.is_null() {\n        return Err(Error::new(ErrorKind::Other, \"pw not found\"));\n    }\n\n    // Sanity check.\n    assert_eq!(entry.pw_uid, uid);\n\n    // Build a borrowed Passwd struct.\n    Ok(Passwd {\n        name: unsafe { CStr::from_ptr(entry.pw_name).to_str().unwrap() },\n        dir: unsafe { CStr::from_ptr(entry.pw_dir).to_str().unwrap() },\n        shell: unsafe { CStr::from_ptr(entry.pw_shell).to_str().unwrap() },\n    })\n}\n\n" + }, + { + "chunk_id": "doc_63_chunk_4", + "original_index": 4, + "content": "pub struct Pty {\n    child: Child,\n    file: File,\n    signals: UnixStream,\n}\n\nimpl Pty {\n    pub fn child(&self) -> &Child {\n        &self.child\n    }\n\n    pub fn file(&self) -> &File {\n        &self.file\n    }\n}\n\n/// User information that is required for a new shell session.\nstruct ShellUser {\n    user: String,\n    home: String,\n    shell: String,\n}\n\nimpl ShellUser {\n    
/// look for shell, username, longname, and home dir in the respective environment variables\n    /// before falling back on looking in to `passwd`.\n    fn from_env() -> Result<Self> {\n        let mut buf = [0; 1024];\n        let pw = get_pw_entry(&mut buf);\n\n" + }, + { + "chunk_id": "doc_63_chunk_5", + "original_index": 5, + "content": "        let user = match env::var(\"USER\") {\n            Ok(user) => user,\n            Err(_) => match pw {\n                Ok(ref pw) => pw.name.to_owned(),\n                Err(err) => return Err(err),\n            },\n        };\n\n        let home = match env::var(\"HOME\") {\n            Ok(home) => home,\n            Err(_) => match pw {\n                Ok(ref pw) => pw.dir.to_owned(),\n                Err(err) => return Err(err),\n            },\n        };\n\n        let shell = match env::var(\"SHELL\") {\n            Ok(shell) => shell,\n            Err(_) => match pw {\n                Ok(ref pw) => pw.shell.to_owned(),\n                Err(err) => return Err(err),\n            },\n        };\n\n        Ok(Self { user, home, shell })\n    }\n}\n\n#[cfg(not(target_os = \"macos\"))]\nfn default_shell_command(shell: &str, _user: &str) -> Command {\n    Command::new(shell)\n}\n\n" + }, + { + "chunk_id": "doc_63_chunk_6", + "original_index": 6, + "content": "#[cfg(target_os = \"macos\")]\nfn default_shell_command(shell: &str, user: &str) -> Command {\n    let shell_name = shell.rsplit('/').next().unwrap();\n\n    // On macOS, use the `login` command so the shell will appear as a tty session.\n    let mut login_command = Command::new(\"/usr/bin/login\");\n\n    // Exec the shell with argv[0] prepended by '-' so it becomes a login shell.\n    // `login` normally does this itself, but `-l` disables this.\n    let exec = format!(\"exec -a -{} {}\", shell_name, shell);\n\n" + }, + { + "chunk_id": "doc_63_chunk_7", + "original_index": 7, + "content": "    // -f: Bypasses authentication for the already-logged-in user.\n    // -l: Skips changing directory to $HOME and prepending '-' to argv[0].\n    // -p: Preserves the environment.\n    //\n    // XXX: we use zsh here over sh due to `exec -a`.\n    login_command.args([\"-flp\", user, \"/bin/zsh\", \"-c\", &exec]);\n    login_command\n}\n\n/// Create a new TTY and return a handle to interact with it.\npub fn new(config: &Options, window_size: WindowSize, window_id: u64) -> Result<Pty> {\n    let pty = openpty(None, Some(&window_size.to_winsize()))?;\n    let (master, slave) = (pty.controller, pty.user);\n    let master_fd = master.as_raw_fd();\n    let slave_fd = slave.as_raw_fd();\n\n" + }, + { + "chunk_id": "doc_63_chunk_8", + "original_index": 8, + "content": "    #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n    if let Ok(mut termios) = termios::tcgetattr(&master) {\n        // Set character encoding to UTF-8.\n        termios.input_modes.set(InputModes::IUTF8, true);\n        let _ = termios::tcsetattr(&master, OptionalActions::Now, &termios);\n    }\n\n    let user = ShellUser::from_env()?;\n\n    let mut builder = if let Some(shell) = config.shell.as_ref() {\n        let mut cmd = Command::new(&shell.program);\n        cmd.args(shell.args.as_slice());\n        cmd\n    } else {\n        default_shell_command(&user.shell, &user.user)\n    };\n\n" + }, + { + "chunk_id": "doc_63_chunk_9", + "original_index": 9, + "content": "    // Setup child stdin/stdout/stderr as slave fd of PTY.\n    // Ownership of fd is transferred to the Stdio structs and will be closed by them at the end of\n    // this scope. 
(It is not an issue that the fd is closed three times since File::drop ignores\n // error on libc::close.).\n builder.stdin(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stderr(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stdout(unsafe { Stdio::from_raw_fd(slave_fd) });\n\n" + }, + { + "chunk_id": "doc_63_chunk_10", + "original_index": 10, + "content": " // Setup shell environment.\n let window_id = window_id.to_string();\n builder.env(\"ALACRITTY_WINDOW_ID\", &window_id);\n builder.env(\"USER\", user.user);\n builder.env(\"HOME\", user.home);\n // Set Window ID for clients relying on X11 hacks.\n builder.env(\"WINDOWID\", window_id);\n for (key, value) in &config.env {\n builder.env(key, value);\n }\n\n unsafe {\n builder.pre_exec(move || {\n // Create a new process group.\n let err = libc::setsid();\n if err == -1 {\n return Err(Error::new(ErrorKind::Other, \"Failed to set session id\"));\n }\n\n set_controlling_terminal(slave_fd);\n\n // No longer need slave/master fds.\n libc::close(slave_fd);\n libc::close(master_fd);\n\n" + }, + { + "chunk_id": "doc_63_chunk_11", + "original_index": 11, + "content": " libc::signal(libc::SIGCHLD, libc::SIG_DFL);\n libc::signal(libc::SIGHUP, libc::SIG_DFL);\n libc::signal(libc::SIGINT, libc::SIG_DFL);\n libc::signal(libc::SIGQUIT, libc::SIG_DFL);\n libc::signal(libc::SIGTERM, libc::SIG_DFL);\n libc::signal(libc::SIGALRM, libc::SIG_DFL);\n\n Ok(())\n });\n }\n\n // Handle set working directory option.\n if let Some(dir) = &config.working_directory {\n builder.current_dir(dir);\n }\n\n // Prepare signal handling before spawning child.\n let signals = {\n let (sender, recv) = UnixStream::pair()?;\n\n" + }, + { + "chunk_id": "doc_63_chunk_12", + "original_index": 12, + "content": " // Register the recv end of the pipe for SIGCHLD.\n signal_pipe::register(sigconsts::SIGCHLD, sender)?;\n recv.set_nonblocking(true)?;\n recv\n };\n\n match builder.spawn() {\n Ok(child) => {\n unsafe {\n // Maybe this should be done outside of this function so nonblocking\n // isn't forced upon consumers. 
Although maybe it should be?\n                set_nonblocking(master_fd);\n            }\n\n            Ok(Pty { child, file: File::from(master), signals })\n        },\n        Err(err) => Err(Error::new(\n            err.kind(),\n            format!(\n                \"Failed to spawn command '{}': {}\",\n                builder.get_program().to_string_lossy(),\n                err\n            ),\n        )),\n    }\n}\n\n" + }, + { + "chunk_id": "doc_63_chunk_13", + "original_index": 13, + "content": "impl Drop for Pty {\n    fn drop(&mut self) {\n        // Make sure the PTY is terminated properly.\n        unsafe {\n            libc::kill(self.child.id() as i32, libc::SIGHUP);\n        }\n        let _ = self.child.wait();\n    }\n}\n\nimpl EventedReadWrite for Pty {\n    type Reader = File;\n    type Writer = File;\n\n    #[inline]\n    unsafe fn register(\n        &mut self,\n        poll: &Arc<Poller>,\n        mut interest: Event,\n        poll_opts: PollMode,\n    ) -> Result<()> {\n        interest.key = PTY_READ_WRITE_TOKEN;\n        unsafe {\n            poll.add_with_mode(&self.file, interest, poll_opts)?;\n        }\n\n" + }, + { + "chunk_id": "doc_63_chunk_14", + "original_index": 14, + "content": "        unsafe {\n            poll.add_with_mode(\n                &self.signals,\n                Event::readable(PTY_CHILD_EVENT_TOKEN),\n                PollMode::Level,\n            )\n        }\n    }\n\n    #[inline]\n    fn reregister(\n        &mut self,\n        poll: &Arc<Poller>,\n        mut interest: Event,\n        poll_opts: PollMode,\n    ) -> Result<()> {\n        interest.key = PTY_READ_WRITE_TOKEN;\n        poll.modify_with_mode(&self.file, interest, poll_opts)?;\n\n" + }, + { + "chunk_id": "doc_63_chunk_15", + "original_index": 15, + "content": "        poll.modify_with_mode(\n            &self.signals,\n            Event::readable(PTY_CHILD_EVENT_TOKEN),\n            PollMode::Level,\n        )\n    }\n\n    #[inline]\n    fn deregister(&mut self, poll: &Arc<Poller>) -> Result<()> {\n        poll.delete(&self.file)?;\n        poll.delete(&self.signals)\n    }\n\n    #[inline]\n    fn reader(&mut self) -> &mut File {\n        &mut self.file\n    }\n\n    #[inline]\n    fn writer(&mut self) -> &mut File {\n        &mut self.file\n    }\n}\n\n" + }, + { + "chunk_id": "doc_63_chunk_16", + "original_index": 16, + "content": "impl EventedPty for Pty {\n    #[inline]\n    fn next_child_event(&mut self) -> Option<ChildEvent> {\n        // See if there has been a SIGCHLD.\n        let mut buf = [0u8; 1];\n        if let Err(err) = self.signals.read(&mut buf) {\n            if err.kind() != ErrorKind::WouldBlock {\n                error!(\"Error reading from signal pipe: {}\", err);\n            }\n            return None;\n        }\n\n        // Match on the child process.\n        match self.child.try_wait() {\n            Err(err) => {\n                error!(\"Error checking child process termination: {}\", err);\n                None\n            },\n            Ok(None) => None,\n            Ok(exit_status) => Some(ChildEvent::Exited(exit_status.and_then(|s| s.code()))),\n        }\n    }\n}\n\n" + }, + { + "chunk_id": "doc_63_chunk_17", + "original_index": 17, + "content": "impl OnResize for Pty {\n    /// Resize the PTY.\n    ///\n    /// Tells the kernel that the window size changed with the new pixel\n    /// dimensions and line/column counts.\n    fn on_resize(&mut self, window_size: WindowSize) {\n        let win = window_size.to_winsize();\n\n        let res = unsafe { libc::ioctl(self.file.as_raw_fd(), libc::TIOCSWINSZ, &win as *const _) };\n\n        if res < 0 {\n            die!(\"ioctl TIOCSWINSZ failed: {}\", Error::last_os_error());\n        }\n    }\n}\n\n" + }, + { + "chunk_id": "doc_63_chunk_18", + "original_index": 18, + "content": "/// Types that can produce a `Winsize`.\npub trait ToWinsize {\n    /// Get a `Winsize`.\n    fn to_winsize(self) -> Winsize;\n}\n\nimpl ToWinsize for WindowSize {\n    fn to_winsize(self) -> Winsize {\n        let ws_row = self.num_lines as libc::c_ushort;\n        let ws_col = self.num_cols as libc::c_ushort;\n\n        let ws_xpixel = ws_col * self.cell_width as libc::c_ushort;\n        let ws_ypixel = ws_row * self.cell_height as libc::c_ushort;\n        Winsize { ws_row, ws_col, ws_xpixel, ws_ypixel }\n    
}\n}\n\nunsafe fn set_nonblocking(fd: c_int) {\n    use libc::{fcntl, F_GETFL, F_SETFL, O_NONBLOCK};\n\n    let res = fcntl(fd, F_SETFL, fcntl(fd, F_GETFL, 0) | O_NONBLOCK);\n    assert_eq!(res, 0);\n}\n\n#[test]\nfn test_get_pw_entry() {\n    let mut buf: [i8; 1024] = [0; 1024];\n    let _pw = get_pw_entry(&mut buf).unwrap();\n}\n" + } + ] + }, + { + "doc_id": "doc_64", + "original_uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", + "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n    pub hide_when_typing: bool,\n    pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec<MouseBinding>);\n\nimpl Default for MouseBindings {\n    fn default() -> Self {\n        Self(bindings::default_mouse_bindings())\n    }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n    where\n        D: Deserializer<'de>,\n    {\n        Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n    }\n}\n", + "chunks": [ + { + "chunk_id": "doc_64_chunk_0", + "original_index": 0, + "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n    pub hide_when_typing: bool,\n    pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec<MouseBinding>);\n\nimpl Default for MouseBindings {\n    fn default() -> Self {\n        Self(bindings::default_mouse_bindings())\n    }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n    where\n        D: Deserializer<'de>,\n    {\n        Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n    }\n}\n" + } + ] + }, + { + "doc_id": "doc_65", + "original_uuid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578", + "content": "use log::{debug, warn};\nuse raw_window_handle::RawDisplayHandle;\n\nuse alacritty_terminal::term::ClipboardType;\n\n#[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\nuse copypasta::nop_clipboard::NopClipboardContext;\n#[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::wayland_clipboard;\n#[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::x11_clipboard::{Primary as X11SelectionClipboard, X11ClipboardContext};\n#[cfg(any(feature = \"x11\", target_os = \"macos\", windows))]\nuse copypasta::ClipboardContext;\nuse copypasta::ClipboardProvider;\n\npub struct Clipboard {\n    clipboard: Box<dyn ClipboardProvider>,\n    selection: Option<Box<dyn ClipboardProvider>>,\n}\n\nimpl Clipboard {\n    pub unsafe fn new(display: RawDisplayHandle) -> Self {\n        match display {\n            #[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\n            RawDisplayHandle::Wayland(display) => {\n                let (selection, clipboard) =\n                    wayland_clipboard::create_clipboards_from_external(display.display);\n                Self { clipboard: Box::new(clipboard), selection: Some(Box::new(selection)) }\n            },\n            _ => Self::default(),\n        }\n    }\n\n    /// Used for tests and to handle missing clipboard provider when built without the `x11`\n    /// feature.\n    #[cfg(any(test, not(any(feature = \"x11\", target_os = 
\"macos\", windows))))]\n pub fn new_nop() -> Self {\n Self { clipboard: Box::new(NopClipboardContext::new().unwrap()), selection: None }\n }\n}\n\nimpl Default for Clipboard {\n fn default() -> Self {\n #[cfg(any(target_os = \"macos\", windows))]\n return Self { clipboard: Box::new(ClipboardContext::new().unwrap()), selection: None };\n\n #[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\n return Self {\n clipboard: Box::new(ClipboardContext::new().unwrap()),\n selection: Some(Box::new(X11ClipboardContext::::new().unwrap())),\n };\n\n #[cfg(not(any(feature = \"x11\", target_os = \"macos\", windows)))]\n return Self::new_nop();\n }\n}\n\nimpl Clipboard {\n pub fn store(&mut self, ty: ClipboardType, text: impl Into) {\n let clipboard = match (ty, &mut self.selection) {\n (ClipboardType::Selection, Some(provider)) => provider,\n (ClipboardType::Selection, None) => return,\n _ => &mut self.clipboard,\n };\n\n clipboard.set_contents(text.into()).unwrap_or_else(|err| {\n warn!(\"Unable to store text in clipboard: {}\", err);\n });\n }\n\n pub fn load(&mut self, ty: ClipboardType) -> String {\n let clipboard = match (ty, &mut self.selection) {\n (ClipboardType::Selection, Some(provider)) => provider,\n _ => &mut self.clipboard,\n };\n\n match clipboard.get_contents() {\n Err(err) => {\n debug!(\"Unable to load text from clipboard: {}\", err);\n String::new()\n },\n Ok(text) => text,\n }\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_65_chunk_0", + "original_index": 0, + "content": "use log::{debug, warn};\nuse raw_window_handle::RawDisplayHandle;\n\nuse alacritty_terminal::term::ClipboardType;\n\n#[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\nuse copypasta::nop_clipboard::NopClipboardContext;\n#[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::wayland_clipboard;\n#[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::x11_clipboard::{Primary as X11SelectionClipboard, X11ClipboardContext};\n#[cfg(any(feature = \"x11\", target_os = \"macos\", windows))]\nuse copypasta::ClipboardContext;\nuse copypasta::ClipboardProvider;\n\n" + }, + { + "chunk_id": "doc_65_chunk_1", + "original_index": 1, + "content": "pub struct Clipboard {\n clipboard: Box,\n selection: Option>,\n}\n\nimpl Clipboard {\n pub unsafe fn new(display: RawDisplayHandle) -> Self {\n match display {\n #[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\n RawDisplayHandle::Wayland(display) => {\n let (selection, clipboard) =\n wayland_clipboard::create_clipboards_from_external(display.display);\n Self { clipboard: Box::new(clipboard), selection: Some(Box::new(selection)) }\n },\n _ => Self::default(),\n }\n }\n\n" + }, + { + "chunk_id": "doc_65_chunk_2", + "original_index": 2, + "content": " /// Used for tests and to handle missing clipboard provider when built without the `x11`\n /// feature.\n #[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\n pub fn new_nop() -> Self {\n Self { clipboard: Box::new(NopClipboardContext::new().unwrap()), selection: None }\n }\n}\n\nimpl Default for Clipboard {\n fn default() -> Self {\n #[cfg(any(target_os = \"macos\", windows))]\n return Self { clipboard: Box::new(ClipboardContext::new().unwrap()), selection: None };\n\n" + }, + { + "chunk_id": "doc_65_chunk_3", + "original_index": 3, + "content": " #[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\n return Self {\n clipboard: 
Box::new(ClipboardContext::new().unwrap()),\n selection: Some(Box::new(X11ClipboardContext::<X11SelectionClipboard>::new().unwrap())),\n };\n\n #[cfg(not(any(feature = \"x11\", target_os = \"macos\", windows)))]\n return Self::new_nop();\n }\n}\n\nimpl Clipboard {\n pub fn store(&mut self, ty: ClipboardType, text: impl Into<String>) {\n let clipboard = match (ty, &mut self.selection) {\n (ClipboardType::Selection, Some(provider)) => provider,\n (ClipboardType::Selection, None) => return,\n _ => &mut self.clipboard,\n };\n\n" + }, + { + "chunk_id": "doc_65_chunk_4", + "original_index": 4, + "content": " clipboard.set_contents(text.into()).unwrap_or_else(|err| {\n warn!(\"Unable to store text in clipboard: {}\", err);\n });\n }\n\n pub fn load(&mut self, ty: ClipboardType) -> String {\n let clipboard = match (ty, &mut self.selection) {\n (ClipboardType::Selection, Some(provider)) => provider,\n _ => &mut self.clipboard,\n };\n\n match clipboard.get_contents() {\n Err(err) => {\n debug!(\"Unable to load text from clipboard: {}\", err);\n String::new()\n },\n Ok(text) => text,\n }\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_66", + "original_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", + "content": "//! Scheduler for emitting events at a specific time in the future.\n\nuse std::collections::VecDeque;\nuse std::time::{Duration, Instant};\n\nuse winit::event_loop::EventLoopProxy;\nuse winit::window::WindowId;\n\nuse crate::event::Event;\n\n/// ID uniquely identifying a timer.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub struct TimerId {\n topic: Topic,\n window_id: WindowId,\n}\n\nimpl TimerId {\n pub fn new(topic: Topic, window_id: WindowId) -> Self {\n Self { topic, window_id }\n }\n}\n\n/// Available timer topics.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub enum Topic {\n SelectionScrolling,\n DelayedSearch,\n BlinkCursor,\n BlinkTimeout,\n Frame,\n}\n\n/// Event scheduled to be emitted at a specific time.\npub struct Timer {\n pub deadline: Instant,\n pub event: Event,\n pub id: TimerId,\n\n interval: Option<Duration>,\n}\n\n/// Scheduler tracking all pending timers.\npub struct Scheduler {\n timers: VecDeque<Timer>,\n event_proxy: EventLoopProxy<Event>,\n}\n\nimpl Scheduler {\n pub fn new(event_proxy: EventLoopProxy<Event>) -> Self {\n Self { timers: VecDeque::new(), event_proxy }\n }\n\n /// Process all pending timers.\n ///\n /// If there are still timers pending after all ready events have been processed, the closest\n /// pending deadline will be returned.\n pub fn update(&mut self) -> Option<Instant> {\n let now = Instant::now();\n\n while !self.timers.is_empty() && self.timers[0].deadline <= now {\n if let Some(timer) = self.timers.pop_front() {\n // Automatically repeat the event.\n if let Some(interval) = timer.interval {\n self.schedule(timer.event.clone(), interval, true, timer.id);\n }\n\n let _ = self.event_proxy.send_event(timer.event);\n }\n }\n\n self.timers.front().map(|timer| timer.deadline)\n }\n\n /// Schedule a new event.\n pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n let deadline = Instant::now() + interval;\n\n // Get insert position in the schedule.\n let index = self\n .timers\n .iter()\n .position(|timer| timer.deadline > deadline)\n .unwrap_or(self.timers.len());\n\n // Set the automatic event repeat rate.\n let interval = if repeat { Some(interval) } else { None };\n\n self.timers.insert(index, Timer { interval, deadline, event, id: timer_id });\n }\n\n /// Cancel a scheduled event.\n pub fn unschedule(&mut self, id: TimerId) -> Option<Timer> {\n 
let index = self.timers.iter().position(|timer| timer.id == id)?;\n self.timers.remove(index)\n }\n\n /// Check if a timer is already scheduled.\n pub fn scheduled(&mut self, id: TimerId) -> bool {\n self.timers.iter().any(|timer| timer.id == id)\n }\n\n /// Remove all timers scheduled for a window.\n ///\n /// This must be called when a window is removed to ensure that timers on intervals do not\n /// stick around forever and cause a memory leak.\n pub fn unschedule_window(&mut self, window_id: WindowId) {\n self.timers.retain(|timer| timer.id.window_id != window_id);\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_66_chunk_0", + "original_index": 0, + "content": "//! Scheduler for emitting events at a specific time in the future.\n\nuse std::collections::VecDeque;\nuse std::time::{Duration, Instant};\n\nuse winit::event_loop::EventLoopProxy;\nuse winit::window::WindowId;\n\nuse crate::event::Event;\n\n/// ID uniquely identifying a timer.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub struct TimerId {\n topic: Topic,\n window_id: WindowId,\n}\n\nimpl TimerId {\n pub fn new(topic: Topic, window_id: WindowId) -> Self {\n Self { topic, window_id }\n }\n}\n\n/// Available timer topics.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub enum Topic {\n SelectionScrolling,\n DelayedSearch,\n BlinkCursor,\n BlinkTimeout,\n Frame,\n}\n\n/// Event scheduled to be emitted at a specific time.\npub struct Timer {\n pub deadline: Instant,\n pub event: Event,\n pub id: TimerId,\n\n" + }, + { + "chunk_id": "doc_66_chunk_1", + "original_index": 1, + "content": " interval: Option<Duration>,\n}\n\n/// Scheduler tracking all pending timers.\npub struct Scheduler {\n timers: VecDeque<Timer>,\n event_proxy: EventLoopProxy<Event>,\n}\n\nimpl Scheduler {\n pub fn new(event_proxy: EventLoopProxy<Event>) -> Self {\n Self { timers: VecDeque::new(), event_proxy }\n }\n\n /// Process all pending timers.\n ///\n /// If there are still timers pending after all ready events have been processed, the closest\n /// pending deadline will be returned.\n pub fn update(&mut self) -> Option<Instant> {\n let now = Instant::now();\n\n" + }, + { + "chunk_id": "doc_66_chunk_2", + "original_index": 2, + "content": " while !self.timers.is_empty() && self.timers[0].deadline <= now {\n if let Some(timer) = self.timers.pop_front() {\n // Automatically repeat the event.\n if let Some(interval) = timer.interval {\n self.schedule(timer.event.clone(), interval, true, timer.id);\n }\n\n let _ = self.event_proxy.send_event(timer.event);\n }\n }\n\n self.timers.front().map(|timer| timer.deadline)\n }\n\n /// Schedule a new event.\n pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n let deadline = Instant::now() + interval;\n\n" + }, + { + "chunk_id": "doc_66_chunk_3", + "original_index": 3, + "content": " // Get insert position in the schedule.\n let index = self\n .timers\n .iter()\n .position(|timer| timer.deadline > deadline)\n .unwrap_or(self.timers.len());\n\n // Set the automatic event repeat rate.\n let interval = if repeat { Some(interval) } else { None };\n\n self.timers.insert(index, Timer { interval, deadline, event, id: timer_id });\n }\n\n" + }, + { + "chunk_id": "doc_66_chunk_4", + "original_index": 4, + "content": " /// Cancel a scheduled event.\n pub fn unschedule(&mut self, id: TimerId) -> Option<Timer> {\n let index = self.timers.iter().position(|timer| timer.id == id)?;\n self.timers.remove(index)\n }\n\n /// Check if a timer is already scheduled.\n pub fn scheduled(&mut self, id: TimerId) -> bool {\n 
self.timers.iter().any(|timer| timer.id == id)\n }\n\n /// Remove all timers scheduled for a window.\n ///\n /// This must be called when a window is removed to ensure that timers on intervals do not\n /// stick around forever and cause a memory leak.\n pub fn unschedule_window(&mut self, window_id: WindowId) {\n self.timers.retain(|timer| timer.id.window_id != window_id);\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_67", + "original_uuid": "6e284600be25c8833b866ef0ebfab953a8d0a0f8420cfe56fa17e28664de1b82", + "content": "use std::io::Write;\nuse std::{io, panic};\n\nuse windows_sys::Win32::UI::WindowsAndMessaging::{\n MessageBoxW, MB_ICONERROR, MB_OK, MB_SETFOREGROUND, MB_TASKMODAL,\n};\n\nuse alacritty_terminal::tty::windows::win32_string;\n\n// Install a panic handler that renders the panic in a classical Windows error\n// dialog box as well as writes the panic to STDERR.\npub fn attach_handler() {\n panic::set_hook(Box::new(|panic_info| {\n let _ = writeln!(io::stderr(), \"{}\", panic_info);\n let msg = format!(\"{}\\n\\nPress Ctrl-C to Copy\", panic_info);\n unsafe {\n MessageBoxW(\n 0isize,\n win32_string(&msg).as_ptr(),\n win32_string(\"Alacritty: Runtime Error\").as_ptr(),\n MB_ICONERROR | MB_OK | MB_SETFOREGROUND | MB_TASKMODAL,\n );\n }\n }));\n}\n", + "chunks": [ + { + "chunk_id": "doc_67_chunk_0", + "original_index": 0, + "content": "use std::io::Write;\nuse std::{io, panic};\n\nuse windows_sys::Win32::UI::WindowsAndMessaging::{\n MessageBoxW, MB_ICONERROR, MB_OK, MB_SETFOREGROUND, MB_TASKMODAL,\n};\n\nuse alacritty_terminal::tty::windows::win32_string;\n\n// Install a panic handler that renders the panic in a classical Windows error\n// dialog box as well as writes the panic to STDERR.\npub fn attach_handler() {\n panic::set_hook(Box::new(|panic_info| {\n let _ = writeln!(io::stderr(), \"{}\", panic_info);\n let msg = format!(\"{}\\n\\nPress Ctrl-C to Copy\", panic_info);\n unsafe {\n MessageBoxW(\n 0isize,\n win32_string(&msg).as_ptr(),\n win32_string(\"Alacritty: Runtime Error\").as_ptr(),\n MB_ICONERROR | MB_OK | MB_SETFOREGROUND | MB_TASKMODAL,\n );\n }\n }));\n}\n" + } + ] + }, + { + "doc_id": "doc_68", + "original_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", + "content": "//! Defines the Row type which makes up lines in the grid.\n\nuse std::cmp::{max, min};\nuse std::ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo, RangeToInclusive};\nuse std::{ptr, slice};\n\n#[cfg(feature = \"serde\")]\nuse serde::{Deserialize, Serialize};\n\nuse crate::grid::GridCell;\nuse crate::index::Column;\nuse crate::term::cell::ResetDiscriminant;\n\n/// A row in the grid.\n#[derive(Default, Clone, Debug)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub struct Row<T> {\n inner: Vec<T>,\n\n /// Maximum number of occupied entries.\n ///\n /// This is the upper bound on the number of elements in the row, which have been modified\n /// since the last reset. 
All cells after this point are guaranteed to be equal.\n pub(crate) occ: usize,\n}\n\nimpl<T: PartialEq> PartialEq for Row<T> {\n fn eq(&self, other: &Self) -> bool {\n self.inner == other.inner\n }\n}\n\nimpl<T: Clone + Default> Row<T> {\n /// Create a new terminal row.\n ///\n /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.\n pub fn new(columns: usize) -> Row<T> {\n debug_assert!(columns >= 1);\n\n let mut inner: Vec<T> = Vec::with_capacity(columns);\n\n // This is a slightly optimized version of `std::vec::Vec::resize`.\n unsafe {\n let mut ptr = inner.as_mut_ptr();\n\n for _ in 1..columns {\n ptr::write(ptr, T::default());\n ptr = ptr.offset(1);\n }\n ptr::write(ptr, T::default());\n\n inner.set_len(columns);\n }\n\n Row { inner, occ: 0 }\n }\n\n /// Increase the number of columns in the row.\n #[inline]\n pub fn grow(&mut self, columns: usize) {\n if self.inner.len() >= columns {\n return;\n }\n\n self.inner.resize_with(columns, T::default);\n }\n\n /// Reduce the number of columns in the row.\n ///\n /// This will return all non-empty cells that were removed.\n pub fn shrink(&mut self, columns: usize) -> Option<Vec<T>>\n where\n T: GridCell,\n {\n if self.inner.len() <= columns {\n return None;\n }\n\n // Split off cells for a new row.\n let mut new_row = self.inner.split_off(columns);\n let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n new_row.truncate(index);\n\n self.occ = min(self.occ, columns);\n\n if new_row.is_empty() {\n None\n } else {\n Some(new_row)\n }\n }\n\n /// Reset all cells in the row to the `template` cell.\n #[inline]\n pub fn reset<D>(&mut self, template: &T)\n where\n T: ResetDiscriminant<D> + GridCell,\n D: PartialEq,\n {\n debug_assert!(!self.inner.is_empty());\n\n // Mark all cells as dirty if template cell changed.\n let len = self.inner.len();\n if self.inner[len - 1].discriminant() != template.discriminant() {\n self.occ = len;\n }\n\n // Reset every dirty cell in the row.\n for item in &mut self.inner[0..self.occ] {\n item.reset(template);\n }\n\n self.occ = 0;\n }\n}\n\n#[allow(clippy::len_without_is_empty)]\nimpl<T> Row<T> {\n #[inline]\n pub fn from_vec(vec: Vec<T>, occ: usize) -> Row<T> {\n Row { inner: vec, occ }\n }\n\n #[inline]\n pub fn len(&self) -> usize {\n self.inner.len()\n }\n\n #[inline]\n pub fn last(&self) -> Option<&T> {\n self.inner.last()\n }\n\n #[inline]\n pub fn last_mut(&mut self) -> Option<&mut T> {\n self.occ = self.inner.len();\n self.inner.last_mut()\n }\n\n #[inline]\n pub fn append(&mut self, vec: &mut Vec<T>)\n where\n T: GridCell,\n {\n self.occ += vec.len();\n self.inner.append(vec);\n }\n\n #[inline]\n pub fn append_front(&mut self, mut vec: Vec<T>) {\n self.occ += vec.len();\n\n vec.append(&mut self.inner);\n self.inner = vec;\n }\n\n /// Check if all cells in the row are empty.\n #[inline]\n pub fn is_clear(&self) -> bool\n where\n T: GridCell,\n {\n self.inner.iter().all(GridCell::is_empty)\n }\n\n #[inline]\n pub fn front_split_off(&mut self, at: usize) -> Vec<T> {\n self.occ = self.occ.saturating_sub(at);\n\n let mut split = self.inner.split_off(at);\n std::mem::swap(&mut split, &mut self.inner);\n split\n }\n}\n\nimpl<'a, T> IntoIterator for &'a Row<T> {\n type IntoIter = slice::Iter<'a, T>;\n type Item = &'a T;\n\n #[inline]\n fn into_iter(self) -> slice::Iter<'a, T> {\n self.inner.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Row<T> {\n type IntoIter = slice::IterMut<'a, T>;\n type Item = &'a mut T;\n\n #[inline]\n fn into_iter(self) -> slice::IterMut<'a, T> {\n self.occ = self.len();\n self.inner.iter_mut()\n }\n}\n\nimpl<T> 
Index<Column> for Row<T> {\n type Output = T;\n\n #[inline]\n fn index(&self, index: Column) -> &T {\n &self.inner[index.0]\n }\n}\n\nimpl<T> IndexMut<Column> for Row<T> {\n #[inline]\n fn index_mut(&mut self, index: Column) -> &mut T {\n self.occ = max(self.occ, *index + 1);\n &mut self.inner[index.0]\n }\n}\n\nimpl<T> Index<Range<Column>> for Row<T> {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: Range<Column>) -> &[T] {\n &self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\nimpl<T> IndexMut<Range<Column>> for Row<T> {\n #[inline]\n fn index_mut(&mut self, index: Range<Column>) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\nimpl<T> Index<RangeTo<Column>> for Row<T> {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeTo<Column>) -> &[T] {\n &self.inner[..(index.end.0)]\n }\n}\n\nimpl<T> IndexMut<RangeTo<Column>> for Row<T> {\n #[inline]\n fn index_mut(&mut self, index: RangeTo<Column>) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[..(index.end.0)]\n }\n}\n\nimpl<T> Index<RangeFrom<Column>> for Row<T> {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeFrom<Column>) -> &[T] {\n &self.inner[(index.start.0)..]\n }\n}\n\nimpl<T> IndexMut<RangeFrom<Column>> for Row<T> {\n #[inline]\n fn index_mut(&mut self, index: RangeFrom<Column>) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[(index.start.0)..]\n }\n}\n\nimpl<T> Index<RangeFull> for Row<T> {\n type Output = [T];\n\n #[inline]\n fn index(&self, _: RangeFull) -> &[T] {\n &self.inner[..]\n }\n}\n\nimpl<T> IndexMut<RangeFull> for Row<T> {\n #[inline]\n fn index_mut(&mut self, _: RangeFull) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[..]\n }\n}\n\nimpl<T> Index<RangeToInclusive<Column>> for Row<T> {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeToInclusive<Column>) -> &[T] {\n &self.inner[..=(index.end.0)]\n }\n}\n\nimpl<T> IndexMut<RangeToInclusive<Column>> for Row<T> {\n #[inline]\n fn index_mut(&mut self, index: RangeToInclusive<Column>) -> &mut [T] {\n self.occ = max(self.occ, *index.end + 1);\n &mut self.inner[..=(index.end.0)]\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_68_chunk_0", + "original_index": 0, + "content": "//! Defines the Row type which makes up lines in the grid.\n\nuse std::cmp::{max, min};\nuse std::ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo, RangeToInclusive};\nuse std::{ptr, slice};\n\n#[cfg(feature = \"serde\")]\nuse serde::{Deserialize, Serialize};\n\nuse crate::grid::GridCell;\nuse crate::index::Column;\nuse crate::term::cell::ResetDiscriminant;\n\n/// A row in the grid.\n#[derive(Default, Clone, Debug)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub struct Row<T> {\n inner: Vec<T>,\n\n" + }, + { + "chunk_id": "doc_68_chunk_1", + "original_index": 1, + "content": " /// Maximum number of occupied entries.\n ///\n /// This is the upper bound on the number of elements in the row, which have been modified\n /// since the last reset. 
All cells after this point are guaranteed to be equal.\n pub(crate) occ: usize,\n}\n\nimpl<T: PartialEq> PartialEq for Row<T> {\n fn eq(&self, other: &Self) -> bool {\n self.inner == other.inner\n }\n}\n\nimpl<T: Clone + Default> Row<T> {\n /// Create a new terminal row.\n ///\n /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.\n pub fn new(columns: usize) -> Row<T> {\n debug_assert!(columns >= 1);\n\n" + }, + { + "chunk_id": "doc_68_chunk_2", + "original_index": 2, + "content": " let mut inner: Vec<T> = Vec::with_capacity(columns);\n\n // This is a slightly optimized version of `std::vec::Vec::resize`.\n unsafe {\n let mut ptr = inner.as_mut_ptr();\n\n for _ in 1..columns {\n ptr::write(ptr, T::default());\n ptr = ptr.offset(1);\n }\n ptr::write(ptr, T::default());\n\n inner.set_len(columns);\n }\n\n Row { inner, occ: 0 }\n }\n\n" + }, + { + "chunk_id": "doc_68_chunk_3", + "original_index": 3, + "content": " /// Increase the number of columns in the row.\n #[inline]\n pub fn grow(&mut self, columns: usize) {\n if self.inner.len() >= columns {\n return;\n }\n\n self.inner.resize_with(columns, T::default);\n }\n\n /// Reduce the number of columns in the row.\n ///\n /// This will return all non-empty cells that were removed.\n pub fn shrink(&mut self, columns: usize) -> Option<Vec<T>>\n where\n T: GridCell,\n {\n if self.inner.len() <= columns {\n return None;\n }\n\n // Split off cells for a new row.\n let mut new_row = self.inner.split_off(columns);\n let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n new_row.truncate(index);\n\n self.occ = min(self.occ, columns);\n\n if new_row.is_empty() {\n None\n } else {\n Some(new_row)\n }\n }\n\n" + }, + { + "chunk_id": "doc_68_chunk_4", + "original_index": 4, + "content": " /// Reset all cells in the row to the `template` cell.\n #[inline]\n pub fn reset<D>(&mut self, template: &T)\n where\n T: ResetDiscriminant<D> + GridCell,\n D: PartialEq,\n {\n debug_assert!(!self.inner.is_empty());\n\n // Mark all cells as dirty if template cell changed.\n let len = self.inner.len();\n if self.inner[len - 1].discriminant() != template.discriminant() {\n self.occ = len;\n }\n\n // Reset every dirty cell in the row.\n for item in &mut self.inner[0..self.occ] {\n item.reset(template);\n }\n\n self.occ = 0;\n }\n}\n\n#[allow(clippy::len_without_is_empty)]\nimpl<T> Row<T> {\n #[inline]\n pub fn from_vec(vec: Vec<T>, occ: usize) -> Row<T> {\n Row { inner: vec, occ }\n }\n\n" + }, + { + "chunk_id": "doc_68_chunk_5", + "original_index": 5, + "content": " #[inline]\n pub fn len(&self) -> usize {\n self.inner.len()\n }\n\n #[inline]\n pub fn last(&self) -> Option<&T> {\n self.inner.last()\n }\n\n #[inline]\n pub fn last_mut(&mut self) -> Option<&mut T> {\n self.occ = self.inner.len();\n self.inner.last_mut()\n }\n\n #[inline]\n pub fn append(&mut self, vec: &mut Vec<T>)\n where\n T: GridCell,\n {\n self.occ += vec.len();\n self.inner.append(vec);\n }\n\n #[inline]\n pub fn append_front(&mut self, mut vec: Vec<T>) {\n self.occ += vec.len();\n\n vec.append(&mut self.inner);\n self.inner = vec;\n }\n\n" + }, + { + "chunk_id": "doc_68_chunk_6", + "original_index": 6, + "content": " /// Check if all cells in the row are empty.\n #[inline]\n pub fn is_clear(&self) -> bool\n where\n T: GridCell,\n {\n self.inner.iter().all(GridCell::is_empty)\n }\n\n #[inline]\n pub fn front_split_off(&mut self, at: usize) -> Vec<T> {\n self.occ = self.occ.saturating_sub(at);\n\n let mut split = self.inner.split_off(at);\n std::mem::swap(&mut split, &mut self.inner);\n split\n }\n}\n\n" + }, + { + "chunk_id": 
"doc_68_chunk_7", + "original_index": 7, + "content": "impl<'a, T> IntoIterator for &'a Row {\n type IntoIter = slice::Iter<'a, T>;\n type Item = &'a T;\n\n #[inline]\n fn into_iter(self) -> slice::Iter<'a, T> {\n self.inner.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Row {\n type IntoIter = slice::IterMut<'a, T>;\n type Item = &'a mut T;\n\n #[inline]\n fn into_iter(self) -> slice::IterMut<'a, T> {\n self.occ = self.len();\n self.inner.iter_mut()\n }\n}\n\n" + }, + { + "chunk_id": "doc_68_chunk_8", + "original_index": 8, + "content": "impl Index for Row {\n type Output = T;\n\n #[inline]\n fn index(&self, index: Column) -> &T {\n &self.inner[index.0]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, index: Column) -> &mut T {\n self.occ = max(self.occ, *index + 1);\n &mut self.inner[index.0]\n }\n}\n\nimpl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: Range) -> &[T] {\n &self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: Range) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\n" + }, + { + "chunk_id": "doc_68_chunk_9", + "original_index": 9, + "content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeTo) -> &[T] {\n &self.inner[..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeTo) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[..(index.end.0)]\n }\n}\n\n" + }, + { + "chunk_id": "doc_68_chunk_10", + "original_index": 10, + "content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeFrom) -> &[T] {\n &self.inner[(index.start.0)..]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeFrom) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[(index.start.0)..]\n }\n}\n\nimpl Index for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, _: RangeFull) -> &[T] {\n &self.inner[..]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, _: RangeFull) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[..]\n }\n}\n\n" + }, + { + "chunk_id": "doc_68_chunk_11", + "original_index": 11, + "content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeToInclusive) -> &[T] {\n &self.inner[..=(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeToInclusive) -> &mut [T] {\n self.occ = max(self.occ, *index.end + 1);\n &mut self.inner[..=(index.end.0)]\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_69", + "original_uuid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293", + "content": "use log::LevelFilter;\n\nuse alacritty_config_derive::ConfigDeserialize;\n\n/// Debugging options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub struct Debug {\n pub log_level: LevelFilter,\n\n pub print_events: bool,\n\n /// Keep the log file after quitting.\n pub persistent_logging: bool,\n\n /// Should show render timer.\n pub render_timer: bool,\n\n /// Highlight damage information produced by alacritty.\n pub highlight_damage: bool,\n\n /// The renderer alacritty should be using.\n pub renderer: Option,\n\n /// Use EGL as display API if the current platform allows it.\n pub prefer_egl: bool,\n\n /// Record ref test.\n #[config(skip)]\n pub ref_test: bool,\n}\n\nimpl Default 
for Debug {\n fn default() -> Self {\n Self {\n log_level: LevelFilter::Warn,\n print_events: Default::default(),\n persistent_logging: Default::default(),\n render_timer: Default::default(),\n highlight_damage: Default::default(),\n ref_test: Default::default(),\n renderer: Default::default(),\n prefer_egl: Default::default(),\n }\n }\n}\n\n/// The renderer configuration options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub enum RendererPreference {\n /// OpenGL 3.3 renderer.\n Glsl3,\n\n /// GLES 2 renderer, with optional extensions like dual source blending.\n Gles2,\n\n /// Pure GLES 2 renderer.\n Gles2Pure,\n}\n", + "chunks": [ + { + "chunk_id": "doc_69_chunk_0", + "original_index": 0, + "content": "use log::LevelFilter;\n\nuse alacritty_config_derive::ConfigDeserialize;\n\n/// Debugging options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub struct Debug {\n pub log_level: LevelFilter,\n\n pub print_events: bool,\n\n /// Keep the log file after quitting.\n pub persistent_logging: bool,\n\n /// Should show render timer.\n pub render_timer: bool,\n\n /// Highlight damage information produced by alacritty.\n pub highlight_damage: bool,\n\n /// The renderer alacritty should be using.\n pub renderer: Option<RendererPreference>,\n\n /// Use EGL as display API if the current platform allows it.\n pub prefer_egl: bool,\n\n /// Record ref test.\n #[config(skip)]\n pub ref_test: bool,\n}\n\n" + }, + { + "chunk_id": "doc_69_chunk_1", + "original_index": 1, + "content": "impl Default for Debug {\n fn default() -> Self {\n Self {\n log_level: LevelFilter::Warn,\n print_events: Default::default(),\n persistent_logging: Default::default(),\n render_timer: Default::default(),\n highlight_damage: Default::default(),\n ref_test: Default::default(),\n renderer: Default::default(),\n prefer_egl: Default::default(),\n }\n }\n}\n\n/// The renderer configuration options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub enum RendererPreference {\n /// OpenGL 3.3 renderer.\n Glsl3,\n\n /// GLES 2 renderer, with optional extensions like dual source blending.\n Gles2,\n\n /// Pure GLES 2 renderer.\n Gles2Pure,\n}\n" + } + ] + }, + { + "doc_id": "doc_70", + "original_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", + "content": "//! The display subsystem including window management, font rasterization, and\n//! 
GPU drawing.\n\nuse std::cmp;\nuse std::fmt::{self, Formatter};\nuse std::mem::{self, ManuallyDrop};\nuse std::num::NonZeroU32;\nuse std::ops::{Deref, DerefMut};\nuse std::time::{Duration, Instant};\n\nuse glutin::context::{NotCurrentContext, PossiblyCurrentContext};\nuse glutin::prelude::*;\nuse glutin::surface::{Surface, SwapInterval, WindowSurface};\n\nuse log::{debug, info};\nuse parking_lot::MutexGuard;\nuse raw_window_handle::RawWindowHandle;\nuse serde::{Deserialize, Serialize};\nuse winit::dpi::PhysicalSize;\nuse winit::keyboard::ModifiersState;\nuse winit::window::CursorIcon;\n\nuse crossfont::{Rasterize, Rasterizer, Size as FontSize};\nuse unicode_width::UnicodeWidthChar;\n\nuse alacritty_terminal::event::{EventListener, OnResize, WindowSize};\nuse alacritty_terminal::grid::Dimensions as TermDimensions;\nuse alacritty_terminal::index::{Column, Direction, Line, Point};\nuse alacritty_terminal::selection::Selection;\nuse alacritty_terminal::term::cell::Flags;\nuse alacritty_terminal::term::{\n self, point_to_viewport, LineDamageBounds, Term, TermDamage, TermMode, MIN_COLUMNS,\n MIN_SCREEN_LINES,\n};\nuse alacritty_terminal::vte::ansi::{CursorShape, NamedColor};\n\nuse crate::config::font::Font;\nuse crate::config::window::Dimensions;\n#[cfg(not(windows))]\nuse crate::config::window::StartupMode;\nuse crate::config::UiConfig;\nuse crate::display::bell::VisualBell;\nuse crate::display::color::{List, Rgb};\nuse crate::display::content::{RenderableContent, RenderableCursor};\nuse crate::display::cursor::IntoRects;\nuse crate::display::damage::{damage_y_to_viewport_y, DamageTracker};\nuse crate::display::hint::{HintMatch, HintState};\nuse crate::display::meter::Meter;\nuse crate::display::window::Window;\nuse crate::event::{Event, EventType, Mouse, SearchState};\nuse crate::message_bar::{MessageBuffer, MessageType};\nuse crate::renderer::rects::{RenderLine, RenderLines, RenderRect};\nuse crate::renderer::{self, GlyphCache, Renderer};\nuse crate::scheduler::{Scheduler, TimerId, Topic};\nuse crate::string::{ShortenDirection, StrShortener};\n\npub mod color;\npub mod content;\npub mod cursor;\npub mod hint;\npub mod window;\n\nmod bell;\nmod damage;\nmod meter;\n\n/// Label for the forward terminal search bar.\nconst FORWARD_SEARCH_LABEL: &str = \"Search: \";\n\n/// Label for the backward terminal search bar.\nconst BACKWARD_SEARCH_LABEL: &str = \"Backward Search: \";\n\n/// The character used to shorten the visible text like uri preview or search regex.\nconst SHORTENER: char = '\u2026';\n\n/// Color which is used to highlight damaged rects when debugging.\nconst DAMAGE_RECT_COLOR: Rgb = Rgb::new(255, 0, 255);\n\n#[derive(Debug)]\npub enum Error {\n /// Error with window management.\n Window(window::Error),\n\n /// Error dealing with fonts.\n Font(crossfont::Error),\n\n /// Error in renderer.\n Render(renderer::Error),\n\n /// Error during context operations.\n Context(glutin::error::Error),\n}\n\nimpl std::error::Error for Error {\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n match self {\n Error::Window(err) => err.source(),\n Error::Font(err) => err.source(),\n Error::Render(err) => err.source(),\n Error::Context(err) => err.source(),\n }\n }\n}\n\nimpl fmt::Display for Error {\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n match self {\n Error::Window(err) => err.fmt(f),\n Error::Font(err) => err.fmt(f),\n Error::Render(err) => err.fmt(f),\n Error::Context(err) => err.fmt(f),\n }\n }\n}\n\nimpl From<window::Error> for Error {\n fn from(val: window::Error) -> Self 
{\n Error::Window(val)\n }\n}\n\nimpl From<crossfont::Error> for Error {\n fn from(val: crossfont::Error) -> Self {\n Error::Font(val)\n }\n}\n\nimpl From<renderer::Error> for Error {\n fn from(val: renderer::Error) -> Self {\n Error::Render(val)\n }\n}\n\nimpl From<glutin::error::Error> for Error {\n fn from(val: glutin::error::Error) -> Self {\n Error::Context(val)\n }\n}\n\n/// Terminal size info.\n#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]\npub struct SizeInfo<T = f32> {\n /// Terminal window width.\n width: T,\n\n /// Terminal window height.\n height: T,\n\n /// Width of individual cell.\n cell_width: T,\n\n /// Height of individual cell.\n cell_height: T,\n\n /// Horizontal window padding.\n padding_x: T,\n\n /// Vertical window padding.\n padding_y: T,\n\n /// Number of lines in the viewport.\n screen_lines: usize,\n\n /// Number of columns in the viewport.\n columns: usize,\n}\n\nimpl From<SizeInfo<f32>> for SizeInfo<u32> {\n fn from(size_info: SizeInfo<f32>) -> Self {\n Self {\n width: size_info.width as u32,\n height: size_info.height as u32,\n cell_width: size_info.cell_width as u32,\n cell_height: size_info.cell_height as u32,\n padding_x: size_info.padding_x as u32,\n padding_y: size_info.padding_y as u32,\n screen_lines: size_info.screen_lines,\n columns: size_info.screen_lines,\n }\n }\n}\n\nimpl From<SizeInfo<f32>> for WindowSize {\n fn from(size_info: SizeInfo<f32>) -> Self {\n Self {\n num_cols: size_info.columns() as u16,\n num_lines: size_info.screen_lines() as u16,\n cell_width: size_info.cell_width() as u16,\n cell_height: size_info.cell_height() as u16,\n }\n }\n}\n\nimpl<T: Clone + Copy> SizeInfo<T> {\n #[inline]\n pub fn width(&self) -> T {\n self.width\n }\n\n #[inline]\n pub fn height(&self) -> T {\n self.height\n }\n\n #[inline]\n pub fn cell_width(&self) -> T {\n self.cell_width\n }\n\n #[inline]\n pub fn cell_height(&self) -> T {\n self.cell_height\n }\n\n #[inline]\n pub fn padding_x(&self) -> T {\n self.padding_x\n }\n\n #[inline]\n pub fn padding_y(&self) -> T {\n self.padding_y\n }\n}\n\nimpl SizeInfo {\n #[allow(clippy::too_many_arguments)]\n pub fn new(\n width: f32,\n height: f32,\n cell_width: f32,\n cell_height: f32,\n mut padding_x: f32,\n mut padding_y: f32,\n dynamic_padding: bool,\n ) -> SizeInfo {\n if dynamic_padding {\n padding_x = Self::dynamic_padding(padding_x.floor(), width, cell_width);\n padding_y = Self::dynamic_padding(padding_y.floor(), height, cell_height);\n }\n\n let lines = (height - 2. * padding_y) / cell_height;\n let screen_lines = cmp::max(lines as usize, MIN_SCREEN_LINES);\n\n let columns = (width - 2. * padding_x) / cell_width;\n let columns = cmp::max(columns as usize, MIN_COLUMNS);\n\n SizeInfo {\n width,\n height,\n cell_width,\n cell_height,\n padding_x: padding_x.floor(),\n padding_y: padding_y.floor(),\n screen_lines,\n columns,\n }\n }\n\n #[inline]\n pub fn reserve_lines(&mut self, count: usize) {\n self.screen_lines = cmp::max(self.screen_lines.saturating_sub(count), MIN_SCREEN_LINES);\n }\n\n /// Check if coordinates are inside the terminal grid.\n ///\n /// The padding, message bar or search are not counted as part of the grid.\n #[inline]\n pub fn contains_point(&self, x: usize, y: usize) -> bool {\n x <= (self.padding_x + self.columns as f32 * self.cell_width) as usize\n && x > self.padding_x as usize\n && y <= (self.padding_y + self.screen_lines as f32 * self.cell_height) as usize\n && y > self.padding_y as usize\n }\n\n /// Calculate padding to spread it evenly around the terminal content.\n #[inline]\n fn dynamic_padding(padding: f32, dimension: f32, cell_dimension: f32) -> f32 {\n padding + ((dimension - 2. 
* padding) % cell_dimension) / 2.\n }\n}\n\nimpl TermDimensions for SizeInfo {\n #[inline]\n fn columns(&self) -> usize {\n self.columns\n }\n\n #[inline]\n fn screen_lines(&self) -> usize {\n self.screen_lines\n }\n\n #[inline]\n fn total_lines(&self) -> usize {\n self.screen_lines()\n }\n}\n\n#[derive(Default, Clone, Debug, PartialEq, Eq)]\npub struct DisplayUpdate {\n pub dirty: bool,\n\n dimensions: Option<PhysicalSize<u32>>,\n cursor_dirty: bool,\n font: Option<Font>,\n}\n\nimpl DisplayUpdate {\n pub fn dimensions(&self) -> Option<PhysicalSize<u32>> {\n self.dimensions\n }\n\n pub fn font(&self) -> Option<&Font> {\n self.font.as_ref()\n }\n\n pub fn cursor_dirty(&self) -> bool {\n self.cursor_dirty\n }\n\n pub fn set_dimensions(&mut self, dimensions: PhysicalSize<u32>) {\n self.dimensions = Some(dimensions);\n self.dirty = true;\n }\n\n pub fn set_font(&mut self, font: Font) {\n self.font = Some(font);\n self.dirty = true;\n }\n\n pub fn set_cursor_dirty(&mut self) {\n self.cursor_dirty = true;\n self.dirty = true;\n }\n}\n\n/// The display wraps a window, font rasterizer, and GPU renderer.\npub struct Display {\n pub window: Window,\n\n pub size_info: SizeInfo,\n\n /// Hint highlighted by the mouse.\n pub highlighted_hint: Option<HintMatch>,\n\n /// Hint highlighted by the vi mode cursor.\n pub vi_highlighted_hint: Option<HintMatch>,\n\n pub raw_window_handle: RawWindowHandle,\n\n /// UI cursor visibility for blinking.\n pub cursor_hidden: bool,\n\n pub visual_bell: VisualBell,\n\n /// Mapped RGB values for each terminal color.\n pub colors: List,\n\n /// State of the keyboard hints.\n pub hint_state: HintState,\n\n /// Unprocessed display updates.\n pub pending_update: DisplayUpdate,\n\n /// The renderer update that takes place only once before the actual rendering.\n pub pending_renderer_update: Option<RendererUpdate>,\n\n /// The ime on the given display.\n pub ime: Ime,\n\n /// The state of the timer for frame scheduling.\n pub frame_timer: FrameTimer,\n\n /// Damage tracker for the given display.\n pub damage_tracker: DamageTracker,\n\n /// Font size used by the window.\n pub font_size: FontSize,\n\n // Mouse point position when highlighting hints.\n hint_mouse_point: Option<Point>,\n\n renderer: ManuallyDrop<Renderer>,\n\n surface: ManuallyDrop<Surface<WindowSurface>>,\n\n context: ManuallyDrop<Replaceable<PossiblyCurrentContext>>,\n\n glyph_cache: GlyphCache,\n meter: Meter,\n}\n\nimpl Display {\n pub fn new(\n window: Window,\n gl_context: NotCurrentContext,\n config: &UiConfig,\n _tabbed: bool,\n ) -> Result<Display, Error> {\n let raw_window_handle = window.raw_window_handle();\n\n let scale_factor = window.scale_factor as f32;\n let rasterizer = Rasterizer::new()?;\n\n let font_size = config.font.size().scale(scale_factor);\n debug!(\"Loading \\\"{}\\\" font\", &config.font.normal().family);\n let font = config.font.clone().with_size(font_size);\n let mut glyph_cache = GlyphCache::new(rasterizer, &font)?;\n\n let metrics = glyph_cache.font_metrics();\n let (cell_width, cell_height) = compute_cell_size(config, &metrics);\n\n // Resize the window to account for the user configured size.\n if let Some(dimensions) = config.window.dimensions() {\n let size = window_size(config, dimensions, cell_width, cell_height, scale_factor);\n window.request_inner_size(size);\n }\n\n // Create the GL surface to draw into.\n let surface = renderer::platform::create_gl_surface(\n &gl_context,\n window.inner_size(),\n window.raw_window_handle(),\n )?;\n\n // Make the context current.\n let context = gl_context.make_current(&surface)?;\n\n // Create renderer.\n let mut renderer = Renderer::new(&context, config.debug.renderer)?;\n\n // Load font common glyphs to 
accelerate rendering.\n debug!(\"Filling glyph cache with common glyphs\");\n renderer.with_loader(|mut api| {\n glyph_cache.reset_glyph_cache(&mut api);\n });\n\n let padding = config.window.padding(window.scale_factor as f32);\n let viewport_size = window.inner_size();\n\n // Create new size with at least one column and row.\n let size_info = SizeInfo::new(\n viewport_size.width as f32,\n viewport_size.height as f32,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding && config.window.dimensions().is_none(),\n );\n\n info!(\"Cell size: {} x {}\", cell_width, cell_height);\n info!(\"Padding: {} x {}\", size_info.padding_x(), size_info.padding_y());\n info!(\"Width: {}, Height: {}\", size_info.width(), size_info.height());\n\n // Update OpenGL projection.\n renderer.resize(&size_info);\n\n // Clear screen.\n let background_color = config.colors.primary.background;\n renderer.clear(background_color, config.window_opacity());\n\n // Disable shadows for transparent windows on macOS.\n #[cfg(target_os = \"macos\")]\n window.set_has_shadow(config.window_opacity() >= 1.0);\n\n let is_wayland = matches!(raw_window_handle, RawWindowHandle::Wayland(_));\n\n // On Wayland we can safely ignore this call, since the window isn't visible until you\n // actually draw something into it and commit those changes.\n if !is_wayland {\n surface.swap_buffers(&context).expect(\"failed to swap buffers.\");\n renderer.finish();\n }\n\n // Set resize increments for the newly created window.\n if config.window.resize_increments {\n window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n window.set_visible(true);\n\n #[allow(clippy::single_match)]\n #[cfg(not(windows))]\n if !_tabbed {\n match config.window.startup_mode {\n #[cfg(target_os = \"macos\")]\n StartupMode::SimpleFullscreen => window.set_simple_fullscreen(true),\n StartupMode::Maximized if !is_wayland => window.set_maximized(true),\n _ => (),\n }\n }\n\n let hint_state = HintState::new(config.hints.alphabet());\n\n let mut damage_tracker = DamageTracker::new(size_info.screen_lines(), size_info.columns());\n damage_tracker.debug = config.debug.highlight_damage;\n\n // Disable vsync.\n if let Err(err) = surface.set_swap_interval(&context, SwapInterval::DontWait) {\n info!(\"Failed to disable vsync: {}\", err);\n }\n\n Ok(Self {\n context: ManuallyDrop::new(Replaceable::new(context)),\n visual_bell: VisualBell::from(&config.bell),\n renderer: ManuallyDrop::new(renderer),\n surface: ManuallyDrop::new(surface),\n colors: List::from(&config.colors),\n frame_timer: FrameTimer::new(),\n raw_window_handle,\n damage_tracker,\n glyph_cache,\n hint_state,\n size_info,\n font_size,\n window,\n pending_renderer_update: Default::default(),\n vi_highlighted_hint: Default::default(),\n highlighted_hint: Default::default(),\n hint_mouse_point: Default::default(),\n pending_update: Default::default(),\n cursor_hidden: Default::default(),\n meter: Default::default(),\n ime: Default::default(),\n })\n }\n\n #[inline]\n pub fn gl_context(&self) -> &PossiblyCurrentContext {\n self.context.get()\n }\n\n pub fn make_not_current(&mut self) {\n if self.context.get().is_current() {\n self.context.replace_with(|context| {\n context\n .make_not_current()\n .expect(\"failed to disable context\")\n .treat_as_possibly_current()\n });\n }\n }\n\n pub fn make_current(&self) {\n if !self.context.get().is_current() {\n self.context.make_current(&self.surface).expect(\"failed to make context current\")\n }\n }\n\n fn 
swap_buffers(&self) {\n #[allow(clippy::single_match)]\n let res = match (self.surface.deref(), &self.context.get()) {\n #[cfg(not(any(target_os = \"macos\", windows)))]\n (Surface::Egl(surface), PossiblyCurrentContext::Egl(context))\n if matches!(self.raw_window_handle, RawWindowHandle::Wayland(_))\n && !self.damage_tracker.debug =>\n {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n surface.swap_buffers_with_damage(context, &damage)\n },\n (surface, context) => surface.swap_buffers(context),\n };\n if let Err(err) = res {\n debug!(\"error calling swap_buffers: {}\", err);\n }\n }\n\n /// Update font size and cell dimensions.\n ///\n /// This will return a tuple of the cell width and height.\n fn update_font_size(\n glyph_cache: &mut GlyphCache,\n config: &UiConfig,\n font: &Font,\n ) -> (f32, f32) {\n let _ = glyph_cache.update_font_size(font);\n\n // Compute new cell sizes.\n compute_cell_size(config, &glyph_cache.font_metrics())\n }\n\n /// Reset glyph cache.\n fn reset_glyph_cache(&mut self) {\n let cache = &mut self.glyph_cache;\n self.renderer.with_loader(|mut api| {\n cache.reset_glyph_cache(&mut api);\n });\n }\n\n // XXX: this function must not call to any `OpenGL` related tasks. Renderer updates are\n // performed in [`Self::process_renderer_update`] right before drawing.\n //\n /// Process update events.\n pub fn handle_update<T>(\n &mut self,\n terminal: &mut Term<T>,\n pty_resize_handle: &mut dyn OnResize,\n message_buffer: &MessageBuffer,\n search_state: &mut SearchState,\n config: &UiConfig,\n ) where\n T: EventListener,\n {\n let pending_update = mem::take(&mut self.pending_update);\n\n let (mut cell_width, mut cell_height) =\n (self.size_info.cell_width(), self.size_info.cell_height());\n\n if pending_update.font().is_some() || pending_update.cursor_dirty() {\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.clear_font_cache = true\n }\n\n // Update font size and cell dimensions.\n if let Some(font) = pending_update.font() {\n let cell_dimensions = Self::update_font_size(&mut self.glyph_cache, config, font);\n cell_width = cell_dimensions.0;\n cell_height = cell_dimensions.1;\n\n info!(\"Cell size: {} x {}\", cell_width, cell_height);\n\n // Mark entire terminal as damaged since glyph size could change without cell size\n // changes.\n self.damage_tracker.frame().mark_fully_damaged();\n }\n\n let (mut width, mut height) = (self.size_info.width(), self.size_info.height());\n if let Some(dimensions) = pending_update.dimensions() {\n width = dimensions.width as f32;\n height = dimensions.height as f32;\n }\n\n let padding = config.window.padding(self.window.scale_factor as f32);\n\n let mut new_size = SizeInfo::new(\n width,\n height,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding,\n );\n\n // Update number of column/lines in the viewport.\n let search_active = search_state.history_index.is_some();\n let message_bar_lines = message_buffer.message().map_or(0, |m| m.text(&new_size).len());\n let search_lines = usize::from(search_active);\n new_size.reserve_lines(message_bar_lines + search_lines);\n\n // Update resize increments.\n if config.window.resize_increments {\n self.window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n // Resize when terminal when its dimensions have changed.\n if self.size_info.screen_lines() != new_size.screen_lines\n || self.size_info.columns() != new_size.columns()\n {\n // Resize PTY.\n 
pty_resize_handle.on_resize(new_size.into());\n\n // Resize terminal.\n terminal.resize(new_size);\n\n // Resize damage tracking.\n self.damage_tracker.resize(new_size.screen_lines(), new_size.columns());\n }\n\n // Check if dimensions have changed.\n if new_size != self.size_info {\n // Queue renderer update.\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.resize = true;\n\n // Clear focused search match.\n search_state.clear_focused_match();\n }\n self.size_info = new_size;\n }\n\n // NOTE: Renderer updates are split off, since platforms like Wayland require resize and other\n // OpenGL operations to be performed right before rendering. Otherwise they could lock the\n // back buffer and render with the previous state. This also solves flickering during resizes.\n //\n /// Update the state of the renderer.\n pub fn process_renderer_update(&mut self) {\n let renderer_update = match self.pending_renderer_update.take() {\n Some(renderer_update) => renderer_update,\n _ => return,\n };\n\n // Resize renderer.\n if renderer_update.resize {\n let width = NonZeroU32::new(self.size_info.width() as u32).unwrap();\n let height = NonZeroU32::new(self.size_info.height() as u32).unwrap();\n self.surface.resize(&self.context, width, height);\n }\n\n // Ensure we're modifying the correct OpenGL context.\n self.make_current();\n\n if renderer_update.clear_font_cache {\n self.reset_glyph_cache();\n }\n\n self.renderer.resize(&self.size_info);\n\n info!(\"Padding: {} x {}\", self.size_info.padding_x(), self.size_info.padding_y());\n info!(\"Width: {}, Height: {}\", self.size_info.width(), self.size_info.height());\n }\n\n /// Draw the screen.\n ///\n /// A reference to Term whose state is being drawn must be provided.\n ///\n /// This call may block if vsync is enabled.\n pub fn draw<T: EventListener>(\n &mut self,\n mut terminal: MutexGuard<'_, Term<T>>,\n scheduler: &mut Scheduler,\n message_buffer: &MessageBuffer,\n config: &UiConfig,\n search_state: &mut SearchState,\n ) {\n // Collect renderable content before the terminal is dropped.\n let mut content = RenderableContent::new(config, self, &terminal, search_state);\n let mut grid_cells = Vec::new();\n for cell in &mut content {\n grid_cells.push(cell);\n }\n let selection_range = content.selection_range();\n let foreground_color = content.color(NamedColor::Foreground as usize);\n let background_color = content.color(NamedColor::Background as usize);\n let display_offset = content.display_offset();\n let cursor = content.cursor();\n\n let cursor_point = terminal.grid().cursor.point;\n let total_lines = terminal.grid().total_lines();\n let metrics = self.glyph_cache.font_metrics();\n let size_info = self.size_info;\n\n let vi_mode = terminal.mode().contains(TermMode::VI);\n let vi_cursor_point = if vi_mode { Some(terminal.vi_mode_cursor.point) } else { None };\n\n // Add damage from the terminal.\n if self.collect_damage() {\n match terminal.damage() {\n TermDamage::Full => self.damage_tracker.frame().mark_fully_damaged(),\n TermDamage::Partial(damaged_lines) => {\n for damage in damaged_lines {\n self.damage_tracker.frame().damage_line(damage);\n }\n },\n }\n terminal.reset_damage();\n }\n\n // Drop terminal as early as possible to free lock.\n drop(terminal);\n\n // Add damage from alacritty's UI elements overlapping terminal.\n if self.collect_damage() {\n let requires_full_damage = self.visual_bell.intensity() != 0.\n || self.hint_state.active()\n || search_state.regex().is_some();\n\n if requires_full_damage {\n 
self.damage_tracker.frame().mark_fully_damaged();\n self.damage_tracker.next_frame().mark_fully_damaged();\n }\n\n let vi_cursor_viewport_point =\n vi_cursor_point.and_then(|cursor| point_to_viewport(display_offset, cursor));\n\n self.damage_tracker.damage_vi_cursor(vi_cursor_viewport_point);\n self.damage_tracker.damage_selection(selection_range, display_offset);\n }\n\n // Make sure this window's OpenGL context is active.\n self.make_current();\n\n self.renderer.clear(background_color, config.window_opacity());\n let mut lines = RenderLines::new();\n\n // Optimize loop hint comparator.\n let has_highlighted_hint =\n self.highlighted_hint.is_some() || self.vi_highlighted_hint.is_some();\n\n // Draw grid.\n {\n let _sampler = self.meter.sampler();\n\n // Ensure macOS hasn't reset our viewport.\n #[cfg(target_os = \"macos\")]\n self.renderer.set_viewport(&size_info);\n\n let glyph_cache = &mut self.glyph_cache;\n let highlighted_hint = &self.highlighted_hint;\n let vi_highlighted_hint = &self.vi_highlighted_hint;\n let damage_tracker = &mut self.damage_tracker;\n\n self.renderer.draw_cells(\n &size_info,\n glyph_cache,\n grid_cells.into_iter().map(|mut cell| {\n // Underline hints hovered by mouse or vi mode cursor.\n let point = term::viewport_to_point(display_offset, cell.point);\n\n if has_highlighted_hint {\n let hyperlink =\n cell.extra.as_ref().and_then(|extra| extra.hyperlink.as_ref());\n if highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n || vi_highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n {\n cell.flags.insert(Flags::UNDERLINE);\n // Damage hints for the current and next frames.\n damage_tracker.frame().damage_point(cell.point);\n damage_tracker.next_frame().damage_point(cell.point);\n }\n }\n\n // Update underline/strikeout.\n lines.update(&cell);\n\n cell\n }),\n );\n }\n\n let mut rects = lines.rects(&metrics, &size_info);\n\n if let Some(vi_cursor_point) = vi_cursor_point {\n // Indicate vi mode by showing the cursor's position in the top right corner.\n let line = (-vi_cursor_point.line.0 + size_info.bottommost_line().0) as usize;\n let obstructed_column = Some(vi_cursor_point)\n .filter(|point| point.line == -(display_offset as i32))\n .map(|point| point.column);\n self.draw_line_indicator(config, total_lines, obstructed_column, line);\n } else if search_state.regex().is_some() {\n // Show current display offset in vi-less search to indicate match position.\n self.draw_line_indicator(config, total_lines, None, display_offset);\n };\n\n // Draw cursor.\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n\n // Push visual bell after url/underline/strikeout rects.\n let visual_bell_intensity = self.visual_bell.intensity();\n if visual_bell_intensity != 0. 
{\n let visual_bell_rect = RenderRect::new(\n 0.,\n 0.,\n size_info.width(),\n size_info.height(),\n config.bell.color,\n visual_bell_intensity as f32,\n );\n rects.push(visual_bell_rect);\n }\n\n // Handle IME positioning and search bar rendering.\n let ime_position = match search_state.regex() {\n Some(regex) => {\n let search_label = match search_state.direction() {\n Direction::Right => FORWARD_SEARCH_LABEL,\n Direction::Left => BACKWARD_SEARCH_LABEL,\n };\n\n let search_text = Self::format_search(regex, search_label, size_info.columns());\n\n // Render the search bar.\n self.draw_search(config, &search_text);\n\n // Draw search bar cursor.\n let line = size_info.screen_lines();\n let column = Column(search_text.chars().count() - 1);\n\n // Add cursor to search bar if IME is not active.\n if self.ime.preedit().is_none() {\n let fg = config.colors.footer_bar_foreground();\n let shape = CursorShape::Underline;\n let cursor = RenderableCursor::new(Point::new(line, column), shape, fg, false);\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n }\n\n Some(Point::new(line, column))\n },\n None => {\n let num_lines = self.size_info.screen_lines();\n term::point_to_viewport(display_offset, cursor_point)\n .filter(|point| point.line < num_lines)\n },\n };\n\n // Handle IME.\n if self.ime.is_enabled() {\n if let Some(point) = ime_position {\n let (fg, bg) = if search_state.regex().is_some() {\n (config.colors.footer_bar_foreground(), config.colors.footer_bar_background())\n } else {\n (foreground_color, background_color)\n };\n\n self.draw_ime_preview(point, fg, bg, &mut rects, config);\n }\n }\n\n if let Some(message) = message_buffer.message() {\n let search_offset = usize::from(search_state.regex().is_some());\n let text = message.text(&size_info);\n\n // Create a new rectangle for the background.\n let start_line = size_info.screen_lines() + search_offset;\n let y = size_info.cell_height().mul_add(start_line as f32, size_info.padding_y());\n\n let bg = match message.ty() {\n MessageType::Error => config.colors.normal.red,\n MessageType::Warning => config.colors.normal.yellow,\n };\n\n let x = 0;\n let width = size_info.width() as i32;\n let height = (size_info.height() - y) as i32;\n let message_bar_rect =\n RenderRect::new(x as f32, y, width as f32, height as f32, bg, 1.);\n\n // Push message_bar in the end, so it'll be above all other content.\n rects.push(message_bar_rect);\n\n // Always damage message bar, since it could have messages of the same size in it.\n self.damage_tracker.frame().add_viewport_rect(&size_info, x, y as i32, width, height);\n\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n\n // Relay messages to the user.\n let glyph_cache = &mut self.glyph_cache;\n let fg = config.colors.primary.background;\n for (i, message_text) in text.iter().enumerate() {\n let point = Point::new(start_line + i, Column(0));\n self.renderer.draw_string(\n point,\n fg,\n bg,\n message_text.chars(),\n &size_info,\n glyph_cache,\n );\n }\n } else {\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n }\n\n self.draw_render_timer(config);\n\n // Draw hyperlink uri preview.\n if has_highlighted_hint {\n let cursor_point = vi_cursor_point.or(Some(cursor_point));\n self.draw_hyperlink_preview(config, cursor_point, display_offset);\n }\n\n // Notify winit that we're about to present.\n self.window.pre_present_notify();\n\n // Highlight damage for debugging.\n if self.damage_tracker.debug {\n let damage = 
self.damage_tracker.shape_frame_damage(self.size_info.into());\n let mut rects = Vec::with_capacity(damage.len());\n self.highlight_damage(&mut rects);\n self.renderer.draw_rects(&self.size_info, &metrics, rects);\n }\n\n // Clearing debug highlights from the previous frame requires full redraw.\n self.swap_buffers();\n\n if matches!(self.raw_window_handle, RawWindowHandle::Xcb(_) | RawWindowHandle::Xlib(_)) {\n // On X11 `swap_buffers` does not block for vsync. However the next OpenGl command\n // will block to synchronize (this is `glClear` in Alacritty), which causes a\n // permanent one frame delay.\n self.renderer.finish();\n }\n\n // XXX: Request the new frame after swapping buffers, so the\n // time to finish OpenGL operations is accounted for in the timeout.\n if !matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) {\n self.request_frame(scheduler);\n }\n\n self.damage_tracker.swap_damage();\n }\n\n /// Update to a new configuration.\n pub fn update_config(&mut self, config: &UiConfig) {\n self.damage_tracker.debug = config.debug.highlight_damage;\n self.visual_bell.update_config(&config.bell);\n self.colors = List::from(&config.colors);\n }\n\n /// Update the mouse/vi mode cursor hint highlighting.\n ///\n /// This will return whether the highlighted hints changed.\n pub fn update_highlighted_hints(\n &mut self,\n term: &Term,\n config: &UiConfig,\n mouse: &Mouse,\n modifiers: ModifiersState,\n ) -> bool {\n // Update vi mode cursor hint.\n let vi_highlighted_hint = if term.mode().contains(TermMode::VI) {\n let mods = ModifiersState::all();\n let point = term.vi_mode_cursor.point;\n hint::highlighted_at(term, config, point, mods)\n } else {\n None\n };\n let mut dirty = vi_highlighted_hint != self.vi_highlighted_hint;\n self.vi_highlighted_hint = vi_highlighted_hint;\n\n // Abort if mouse highlighting conditions are not met.\n if !mouse.inside_text_area || !term.selection.as_ref().map_or(true, Selection::is_empty) {\n dirty |= self.highlighted_hint.is_some();\n self.highlighted_hint = None;\n return dirty;\n }\n\n // Find highlighted hint at mouse position.\n let point = mouse.point(&self.size_info, term.grid().display_offset());\n let highlighted_hint = hint::highlighted_at(term, config, point, modifiers);\n\n // Update cursor shape.\n if highlighted_hint.is_some() {\n // If mouse changed the line, we should update the hyperlink preview, since the\n // highlighted hint could be disrupted by the old preview.\n dirty = self.hint_mouse_point.map_or(false, |p| p.line != point.line);\n self.hint_mouse_point = Some(point);\n self.window.set_mouse_cursor(CursorIcon::Pointer);\n } else if self.highlighted_hint.is_some() {\n self.hint_mouse_point = None;\n if term.mode().intersects(TermMode::MOUSE_MODE) && !term.mode().contains(TermMode::VI) {\n self.window.set_mouse_cursor(CursorIcon::Default);\n } else {\n self.window.set_mouse_cursor(CursorIcon::Text);\n }\n }\n\n dirty |= self.highlighted_hint != highlighted_hint;\n self.highlighted_hint = highlighted_hint;\n\n dirty\n }\n\n #[inline(never)]\n fn draw_ime_preview(\n &mut self,\n point: Point,\n fg: Rgb,\n bg: Rgb,\n rects: &mut Vec,\n config: &UiConfig,\n ) {\n let preedit = match self.ime.preedit() {\n Some(preedit) => preedit,\n None => {\n // In case we don't have preedit, just set the popup point.\n self.window.update_ime_position(point, &self.size_info);\n return;\n },\n };\n\n let num_cols = self.size_info.columns();\n\n // Get the visible preedit.\n let visible_text: String = match (preedit.cursor_byte_offset, 
preedit.cursor_end_offset) {\n (Some(byte_offset), Some(end_offset)) if end_offset > num_cols => StrShortener::new(\n &preedit.text[byte_offset..],\n num_cols,\n ShortenDirection::Right,\n Some(SHORTENER),\n ),\n _ => {\n StrShortener::new(&preedit.text, num_cols, ShortenDirection::Left, Some(SHORTENER))\n },\n }\n .collect();\n\n let visible_len = visible_text.chars().count();\n\n let end = cmp::min(point.column.0 + visible_len, num_cols);\n let start = end.saturating_sub(visible_len);\n\n let start = Point::new(point.line, Column(start));\n let end = Point::new(point.line, Column(end - 1));\n\n let glyph_cache = &mut self.glyph_cache;\n let metrics = glyph_cache.font_metrics();\n\n self.renderer.draw_string(\n start,\n fg,\n bg,\n visible_text.chars(),\n &self.size_info,\n glyph_cache,\n );\n\n // Damage preedit inside the terminal viewport.\n if self.collect_damage() && point.line < self.size_info.screen_lines() {\n let damage = LineDamageBounds::new(start.line, 0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n // Add underline for preedit text.\n let underline = RenderLine { start, end, color: fg };\n rects.extend(underline.rects(Flags::UNDERLINE, &metrics, &self.size_info));\n\n let ime_popup_point = match preedit.cursor_end_offset {\n Some(cursor_end_offset) if cursor_end_offset != 0 => {\n let is_wide = preedit.text[preedit.cursor_byte_offset.unwrap_or_default()..]\n .chars()\n .next()\n .map(|ch| ch.width() == Some(2))\n .unwrap_or_default();\n\n let cursor_column = Column(\n (end.column.0 as isize - cursor_end_offset as isize + 1).max(0) as usize,\n );\n let cursor_point = Point::new(point.line, cursor_column);\n let cursor =\n RenderableCursor::new(cursor_point, CursorShape::HollowBlock, fg, is_wide);\n rects.extend(cursor.rects(&self.size_info, config.cursor.thickness()));\n cursor_point\n },\n _ => end,\n };\n\n self.window.update_ime_position(ime_popup_point, &self.size_info);\n }\n\n /// Format search regex to account for the cursor and fullwidth characters.\n fn format_search(search_regex: &str, search_label: &str, max_width: usize) -> String {\n let label_len = search_label.len();\n\n // Skip `search_regex` formatting if only label is visible.\n if label_len > max_width {\n return search_label[..max_width].to_owned();\n }\n\n // The search string consists of `search_label` + `search_regex` + `cursor`.\n let mut bar_text = String::from(search_label);\n bar_text.extend(StrShortener::new(\n search_regex,\n max_width.wrapping_sub(label_len + 1),\n ShortenDirection::Left,\n Some(SHORTENER),\n ));\n\n // Add place for cursor.\n bar_text.push(' ');\n\n bar_text\n }\n\n /// Draw preview for the currently highlighted `Hyperlink`.\n #[inline(never)]\n fn draw_hyperlink_preview(\n &mut self,\n config: &UiConfig,\n cursor_point: Option,\n display_offset: usize,\n ) {\n let num_cols = self.size_info.columns();\n let uris: Vec<_> = self\n .highlighted_hint\n .iter()\n .chain(&self.vi_highlighted_hint)\n .filter_map(|hint| hint.hyperlink().map(|hyperlink| hyperlink.uri()))\n .map(|uri| StrShortener::new(uri, num_cols, ShortenDirection::Right, Some(SHORTENER)))\n .collect();\n\n if uris.is_empty() {\n return;\n }\n\n // The maximum amount of protected lines including the ones we'll show preview on.\n let max_protected_lines = uris.len() * 2;\n\n // Lines we shouldn't show preview on, because it'll obscure the highlighted hint.\n let mut protected_lines = Vec::with_capacity(max_protected_lines);\n if 
self.size_info.screen_lines() > max_protected_lines {\n // Prefer to show preview even when it'll likely obscure the highlighted hint, when\n // there's no place left for it.\n protected_lines.push(self.hint_mouse_point.map(|point| point.line));\n protected_lines.push(cursor_point.map(|point| point.line));\n }\n\n // Find the line in viewport we can draw preview on without obscuring protected lines.\n let viewport_bottom = self.size_info.bottommost_line() - Line(display_offset as i32);\n let viewport_top = viewport_bottom - (self.size_info.screen_lines() - 1);\n let uri_lines = (viewport_top.0..=viewport_bottom.0)\n .rev()\n .map(|line| Some(Line(line)))\n .filter_map(|line| {\n if protected_lines.contains(&line) {\n None\n } else {\n protected_lines.push(line);\n line\n }\n })\n .take(uris.len())\n .flat_map(|line| term::point_to_viewport(display_offset, Point::new(line, Column(0))));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n for (uri, point) in uris.into_iter().zip(uri_lines) {\n // Damage the uri preview.\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n\n // Damage the uri preview for the next frame as well.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n self.renderer.draw_string(point, fg, bg, uri, &self.size_info, &mut self.glyph_cache);\n }\n }\n\n /// Draw current search regex.\n #[inline(never)]\n fn draw_search(&mut self, config: &UiConfig, text: &str) {\n // Assure text length is at least num_cols.\n let num_cols = self.size_info.columns();\n let text = format!(\"{:<1$}\", text, num_cols);\n\n let point = Point::new(self.size_info.screen_lines(), Column(0));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n\n self.renderer.draw_string(\n point,\n fg,\n bg,\n text.chars(),\n &self.size_info,\n &mut self.glyph_cache,\n );\n }\n\n /// Draw render timer.\n #[inline(never)]\n fn draw_render_timer(&mut self, config: &UiConfig) {\n if !config.debug.render_timer {\n return;\n }\n\n let timing = format!(\"{:.3} usec\", self.meter.average());\n let point = Point::new(self.size_info.screen_lines().saturating_sub(2), Column(0));\n let fg = config.colors.primary.background;\n let bg = config.colors.normal.red;\n\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, timing.len());\n self.damage_tracker.frame().damage_line(damage);\n // Damage the render timer for the next frame.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, timing.chars(), &self.size_info, glyph_cache);\n }\n\n /// Draw an indicator for the position of a line in history.\n #[inline(never)]\n fn draw_line_indicator(\n &mut self,\n config: &UiConfig,\n total_lines: usize,\n obstructed_column: Option,\n line: usize,\n ) {\n let columns = self.size_info.columns();\n let text = format!(\"[{}/{}]\", line, total_lines - 1);\n let column = Column(self.size_info.columns().saturating_sub(text.len()));\n let point = Point::new(0, column);\n\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, columns - 1);\n self.damage_tracker.frame().damage_line(damage);\n // Damage it on the next frame in case it goes away.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let colors = &config.colors;\n let fg = 
colors.line_indicator.foreground.unwrap_or(colors.primary.background);\n        let bg = colors.line_indicator.background.unwrap_or(colors.primary.foreground);\n\n        // Do not render anything if it would obscure the vi mode cursor.\n        if obstructed_column.map_or(true, |obstructed_column| obstructed_column < column) {\n            let glyph_cache = &mut self.glyph_cache;\n            self.renderer.draw_string(point, fg, bg, text.chars(), &self.size_info, glyph_cache);\n        }\n    }\n\n    /// Returns `true` if damage information should be collected, `false` otherwise.\n    #[inline]\n    fn collect_damage(&self) -> bool {\n        matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) || self.damage_tracker.debug\n    }\n\n    /// Highlight damaged rects.\n    ///\n    /// This function is for debug purposes only.\n    fn highlight_damage(&self, render_rects: &mut Vec<RenderRect>) {\n        for damage_rect in &self.damage_tracker.shape_frame_damage(self.size_info.into()) {\n            let x = damage_rect.x as f32;\n            let height = damage_rect.height as f32;\n            let width = damage_rect.width as f32;\n            let y = damage_y_to_viewport_y(&self.size_info, damage_rect) as f32;\n            let render_rect = RenderRect::new(x, y, width, height, DAMAGE_RECT_COLOR, 0.5);\n\n            render_rects.push(render_rect);\n        }\n    }\n\n    /// Request a new frame for a window on Wayland.\n    fn request_frame(&mut self, scheduler: &mut Scheduler) {\n        // Mark that we've used a frame.\n        self.window.has_frame = false;\n\n        // Get the display vblank interval.\n        let monitor_vblank_interval = 1_000_000.\n            / self\n                .window\n                .current_monitor()\n                .and_then(|monitor| monitor.refresh_rate_millihertz())\n                .unwrap_or(60_000) as f64;\n\n        // Now convert it to micro seconds.\n        let monitor_vblank_interval =\n            Duration::from_micros((1000. * monitor_vblank_interval) as u64);\n\n        let swap_timeout = self.frame_timer.compute_timeout(monitor_vblank_interval);\n\n        let window_id = self.window.id();\n        let timer_id = TimerId::new(Topic::Frame, window_id);\n        let event = Event::new(EventType::Frame, window_id);\n\n        scheduler.schedule(event, swap_timeout, false, timer_id);\n    }\n}\n\nimpl Drop for Display {\n    fn drop(&mut self) {\n        // Switch OpenGL context before dropping, otherwise objects (like programs) from other\n        // contexts might be deleted when dropping renderer.\n        self.make_current();\n        unsafe {\n            ManuallyDrop::drop(&mut self.renderer);\n            ManuallyDrop::drop(&mut self.context);\n            ManuallyDrop::drop(&mut self.surface);\n        }\n    }\n}\n\n/// Input method state.\n#[derive(Debug, Default)]\npub struct Ime {\n    /// Whether the IME is enabled.\n    enabled: bool,\n\n    /// Current IME preedit.\n    preedit: Option<Preedit>,\n}\n\nimpl Ime {\n    #[inline]\n    pub fn set_enabled(&mut self, is_enabled: bool) {\n        if is_enabled {\n            self.enabled = is_enabled\n        } else {\n            // Clear state when disabling IME.\n            *self = Default::default();\n        }\n    }\n\n    #[inline]\n    pub fn is_enabled(&self) -> bool {\n        self.enabled\n    }\n\n    #[inline]\n    pub fn set_preedit(&mut self, preedit: Option<Preedit>) {\n        self.preedit = preedit;\n    }\n\n    #[inline]\n    pub fn preedit(&self) -> Option<&Preedit> {\n        self.preedit.as_ref()\n    }\n}\n\n#[derive(Debug, Default, PartialEq, Eq)]\npub struct Preedit {\n    /// The preedit text.\n    text: String,\n\n    /// Byte offset for cursor start into the preedit text.\n    ///\n    /// `None` means that the cursor is invisible.\n    cursor_byte_offset: Option<usize>,\n\n    /// The cursor offset from the end of the preedit in char width.\n    cursor_end_offset: Option<usize>,\n}\n\nimpl Preedit {\n    pub fn new(text: String, cursor_byte_offset: Option<usize>) -> Self {\n        let cursor_end_offset = if let Some(byte_offset) = 
cursor_byte_offset {\n            // Convert byte offset into char offset.\n            let cursor_end_offset =\n                text[byte_offset..].chars().fold(0, |acc, ch| acc + ch.width().unwrap_or(1));\n\n            Some(cursor_end_offset)\n        } else {\n            None\n        };\n\n        Self { text, cursor_byte_offset, cursor_end_offset }\n    }\n}\n\n/// Pending renderer updates.\n///\n/// All renderer updates are cached to be applied just before rendering, to avoid platform-specific\n/// rendering issues.\n#[derive(Debug, Default, Copy, Clone)]\npub struct RendererUpdate {\n    /// Should resize the window.\n    resize: bool,\n\n    /// Clear font caches.\n    clear_font_cache: bool,\n}\n\n/// Struct for safe in-place replacement.\n///\n/// This struct allows easily replacing struct fields that provide `self -> Self` methods in-place,\n/// without having to deal with constantly unwrapping the underlying [`Option`].\nstruct Replaceable<T>(Option<T>);\n\nimpl<T> Replaceable<T> {\n    pub fn new(inner: T) -> Self {\n        Self(Some(inner))\n    }\n\n    /// Replace the contents of the container.\n    pub fn replace_with<F: FnOnce(T) -> T>(&mut self, f: F) {\n        self.0 = self.0.take().map(f);\n    }\n\n    /// Get immutable access to the wrapped value.\n    pub fn get(&self) -> &T {\n        self.0.as_ref().unwrap()\n    }\n\n    /// Get mutable access to the wrapped value.\n    pub fn get_mut(&mut self) -> &mut T {\n        self.0.as_mut().unwrap()\n    }\n}\n\nimpl<T> Deref for Replaceable<T> {\n    type Target = T;\n\n    fn deref(&self) -> &Self::Target {\n        self.get()\n    }\n}\n\nimpl<T> DerefMut for Replaceable<T> {\n    fn deref_mut(&mut self) -> &mut Self::Target {\n        self.get_mut()\n    }\n}\n\n/// The frame timer state.\npub struct FrameTimer {\n    /// Base timestamp used to compute sync points.\n    base: Instant,\n\n    /// The last timestamp we synced to.\n    last_synced_timestamp: Instant,\n\n    /// The refresh rate we've used to compute sync timestamps.\n    refresh_interval: Duration,\n}\n\nimpl FrameTimer {\n    pub fn new() -> Self {\n        let now = Instant::now();\n        Self { base: now, last_synced_timestamp: now, refresh_interval: Duration::ZERO }\n    }\n\n    /// Compute the delay that we should use to achieve the target frame\n    /// rate.\n    pub fn compute_timeout(&mut self, refresh_interval: Duration) -> Duration {\n        let now = Instant::now();\n\n        // Handle refresh rate change.\n        if self.refresh_interval != refresh_interval {\n            self.base = now;\n            self.last_synced_timestamp = now;\n            self.refresh_interval = refresh_interval;\n            return refresh_interval;\n        }\n\n        let next_frame = self.last_synced_timestamp + self.refresh_interval;\n\n        if next_frame < now {\n            // Redraw immediately if we haven't drawn in over `refresh_interval` microseconds.\n            let elapsed_micros = (now - self.base).as_micros() as u64;\n            let refresh_micros = self.refresh_interval.as_micros() as u64;\n            self.last_synced_timestamp =\n                now - Duration::from_micros(elapsed_micros % refresh_micros);\n            Duration::ZERO\n        } else {\n            // Redraw on the next `refresh_interval` clock tick.\n            self.last_synced_timestamp = next_frame;\n            next_frame - now\n        }\n    }\n}\n\n/// Calculate the cell dimensions based on font metrics.\n///\n/// This will return a tuple of the cell width and height.\n#[inline]\nfn compute_cell_size(config: &UiConfig, metrics: &crossfont::Metrics) -> (f32, f32) {\n    let offset_x = f64::from(config.font.offset.x);\n    let offset_y = f64::from(config.font.offset.y);\n    (\n        (metrics.average_advance + offset_x).floor().max(1.) as f32,\n        (metrics.line_height + offset_y).floor().max(1.) 
as f32,\n )\n}\n\n/// Calculate the size of the window given padding, terminal dimensions and cell size.\nfn window_size(\n config: &UiConfig,\n dimensions: Dimensions,\n cell_width: f32,\n cell_height: f32,\n scale_factor: f32,\n) -> PhysicalSize {\n let padding = config.window.padding(scale_factor);\n\n let grid_width = cell_width * dimensions.columns.max(MIN_COLUMNS) as f32;\n let grid_height = cell_height * dimensions.lines.max(MIN_SCREEN_LINES) as f32;\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n PhysicalSize::new(width as u32, height as u32)\n}\n", + "chunks": [ + { + "chunk_id": "doc_70_chunk_0", + "original_index": 0, + "content": "//! The display subsystem including window management, font rasterization, and\n//! GPU drawing.\n\nuse std::cmp;\nuse std::fmt::{self, Formatter};\nuse std::mem::{self, ManuallyDrop};\nuse std::num::NonZeroU32;\nuse std::ops::{Deref, DerefMut};\nuse std::time::{Duration, Instant};\n\nuse glutin::context::{NotCurrentContext, PossiblyCurrentContext};\nuse glutin::prelude::*;\nuse glutin::surface::{Surface, SwapInterval, WindowSurface};\n\nuse log::{debug, info};\nuse parking_lot::MutexGuard;\nuse raw_window_handle::RawWindowHandle;\nuse serde::{Deserialize, Serialize};\nuse winit::dpi::PhysicalSize;\nuse winit::keyboard::ModifiersState;\nuse winit::window::CursorIcon;\n\nuse crossfont::{Rasterize, Rasterizer, Size as FontSize};\nuse unicode_width::UnicodeWidthChar;\n\n" + }, + { + "chunk_id": "doc_70_chunk_1", + "original_index": 1, + "content": "use alacritty_terminal::event::{EventListener, OnResize, WindowSize};\nuse alacritty_terminal::grid::Dimensions as TermDimensions;\nuse alacritty_terminal::index::{Column, Direction, Line, Point};\nuse alacritty_terminal::selection::Selection;\nuse alacritty_terminal::term::cell::Flags;\nuse alacritty_terminal::term::{\n self, point_to_viewport, LineDamageBounds, Term, TermDamage, TermMode, MIN_COLUMNS,\n MIN_SCREEN_LINES,\n};\nuse alacritty_terminal::vte::ansi::{CursorShape, NamedColor};\n\n" + }, + { + "chunk_id": "doc_70_chunk_2", + "original_index": 2, + "content": "use crate::config::font::Font;\nuse crate::config::window::Dimensions;\n#[cfg(not(windows))]\nuse crate::config::window::StartupMode;\nuse crate::config::UiConfig;\nuse crate::display::bell::VisualBell;\nuse crate::display::color::{List, Rgb};\nuse crate::display::content::{RenderableContent, RenderableCursor};\nuse crate::display::cursor::IntoRects;\nuse crate::display::damage::{damage_y_to_viewport_y, DamageTracker};\nuse crate::display::hint::{HintMatch, HintState};\nuse crate::display::meter::Meter;\nuse crate::display::window::Window;\nuse crate::event::{Event, EventType, Mouse, SearchState};\nuse crate::message_bar::{MessageBuffer, MessageType};\nuse crate::renderer::rects::{RenderLine, RenderLines, RenderRect};\nuse crate::renderer::{self, GlyphCache, Renderer};\nuse crate::scheduler::{Scheduler, TimerId, Topic};\nuse crate::string::{ShortenDirection, StrShortener};\n\n" + }, + { + "chunk_id": "doc_70_chunk_3", + "original_index": 3, + "content": "pub mod color;\npub mod content;\npub mod cursor;\npub mod hint;\npub mod window;\n\nmod bell;\nmod damage;\nmod meter;\n\n/// Label for the forward terminal search bar.\nconst FORWARD_SEARCH_LABEL: &str = \"Search: \";\n\n/// Label for the backward terminal search bar.\nconst BACKWARD_SEARCH_LABEL: &str = \"Backward Search: \";\n\n/// The character used to shorten the visible text like uri preview or search 
regex.\nconst SHORTENER: char = '\u2026';\n\n/// Color which is used to highlight damaged rects when debugging.\nconst DAMAGE_RECT_COLOR: Rgb = Rgb::new(255, 0, 255);\n\n#[derive(Debug)]\npub enum Error {\n    /// Error with window management.\n    Window(window::Error),\n\n" + }, + { + "chunk_id": "doc_70_chunk_4", + "original_index": 4, + "content": "    /// Error dealing with fonts.\n    Font(crossfont::Error),\n\n    /// Error in renderer.\n    Render(renderer::Error),\n\n    /// Error during context operations.\n    Context(glutin::error::Error),\n}\n\nimpl std::error::Error for Error {\n    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n        match self {\n            Error::Window(err) => err.source(),\n            Error::Font(err) => err.source(),\n            Error::Render(err) => err.source(),\n            Error::Context(err) => err.source(),\n        }\n    }\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_5", + "original_index": 5, + "content": "impl fmt::Display for Error {\n    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n        match self {\n            Error::Window(err) => err.fmt(f),\n            Error::Font(err) => err.fmt(f),\n            Error::Render(err) => err.fmt(f),\n            Error::Context(err) => err.fmt(f),\n        }\n    }\n}\n\nimpl From<window::Error> for Error {\n    fn from(val: window::Error) -> Self {\n        Error::Window(val)\n    }\n}\n\nimpl From<crossfont::Error> for Error {\n    fn from(val: crossfont::Error) -> Self {\n        Error::Font(val)\n    }\n}\n\nimpl From<renderer::Error> for Error {\n    fn from(val: renderer::Error) -> Self {\n        Error::Render(val)\n    }\n}\n\nimpl From<glutin::error::Error> for Error {\n    fn from(val: glutin::error::Error) -> Self {\n        Error::Context(val)\n    }\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_6", + "original_index": 6, + "content": "/// Terminal size info.\n#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]\npub struct SizeInfo<T = f32> {\n    /// Terminal window width.\n    width: T,\n\n    /// Terminal window height.\n    height: T,\n\n    /// Width of individual cell.\n    cell_width: T,\n\n    /// Height of individual cell.\n    cell_height: T,\n\n    /// Horizontal window padding.\n    padding_x: T,\n\n    /// Vertical window padding.\n    padding_y: T,\n\n    /// Number of lines in the viewport.\n    screen_lines: usize,\n\n    /// Number of columns in the viewport.\n    columns: usize,\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_7", + "original_index": 7, + "content": "impl From<SizeInfo<f32>> for SizeInfo<u32> {\n    fn from(size_info: SizeInfo) -> Self {\n        Self {\n            width: size_info.width as u32,\n            height: size_info.height as u32,\n            cell_width: size_info.cell_width as u32,\n            cell_height: size_info.cell_height as u32,\n            padding_x: size_info.padding_x as u32,\n            padding_y: size_info.padding_y as u32,\n            screen_lines: size_info.screen_lines,\n            columns: size_info.columns,\n        }\n    }\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_8", + "original_index": 8, + "content": "impl From<SizeInfo<f32>> for WindowSize {\n    fn from(size_info: SizeInfo) -> Self {\n        Self {\n            num_cols: size_info.columns() as u16,\n            num_lines: size_info.screen_lines() as u16,\n            cell_width: size_info.cell_width() as u16,\n            cell_height: size_info.cell_height() as u16,\n        }\n    }\n}\n\nimpl<T: Copy> SizeInfo<T> {\n    #[inline]\n    pub fn width(&self) -> T {\n        self.width\n    }\n\n    #[inline]\n    pub fn height(&self) -> T {\n        self.height\n    }\n\n    #[inline]\n    pub fn cell_width(&self) -> T {\n        self.cell_width\n    }\n\n    #[inline]\n    pub fn cell_height(&self) -> T {\n        self.cell_height\n    }\n\n    #[inline]\n    pub fn padding_x(&self) -> T {\n        self.padding_x\n    }\n\n    #[inline]\n    pub fn padding_y(&self) -> T {\n        self.padding_y\n    }\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_9", + "original_index": 9, + "content": "impl SizeInfo {\n    #[allow(clippy::too_many_arguments)]\n    pub 
fn new(\n width: f32,\n height: f32,\n cell_width: f32,\n cell_height: f32,\n mut padding_x: f32,\n mut padding_y: f32,\n dynamic_padding: bool,\n ) -> SizeInfo {\n if dynamic_padding {\n padding_x = Self::dynamic_padding(padding_x.floor(), width, cell_width);\n padding_y = Self::dynamic_padding(padding_y.floor(), height, cell_height);\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_10", + "original_index": 10, + "content": " let lines = (height - 2. * padding_y) / cell_height;\n let screen_lines = cmp::max(lines as usize, MIN_SCREEN_LINES);\n\n let columns = (width - 2. * padding_x) / cell_width;\n let columns = cmp::max(columns as usize, MIN_COLUMNS);\n\n SizeInfo {\n width,\n height,\n cell_width,\n cell_height,\n padding_x: padding_x.floor(),\n padding_y: padding_y.floor(),\n screen_lines,\n columns,\n }\n }\n\n #[inline]\n pub fn reserve_lines(&mut self, count: usize) {\n self.screen_lines = cmp::max(self.screen_lines.saturating_sub(count), MIN_SCREEN_LINES);\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_11", + "original_index": 11, + "content": " /// Check if coordinates are inside the terminal grid.\n ///\n /// The padding, message bar or search are not counted as part of the grid.\n #[inline]\n pub fn contains_point(&self, x: usize, y: usize) -> bool {\n x <= (self.padding_x + self.columns as f32 * self.cell_width) as usize\n && x > self.padding_x as usize\n && y <= (self.padding_y + self.screen_lines as f32 * self.cell_height) as usize\n && y > self.padding_y as usize\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_12", + "original_index": 12, + "content": " /// Calculate padding to spread it evenly around the terminal content.\n #[inline]\n fn dynamic_padding(padding: f32, dimension: f32, cell_dimension: f32) -> f32 {\n padding + ((dimension - 2. 
* padding) % cell_dimension) / 2.\n    }\n}\n\nimpl TermDimensions for SizeInfo {\n    #[inline]\n    fn columns(&self) -> usize {\n        self.columns\n    }\n\n    #[inline]\n    fn screen_lines(&self) -> usize {\n        self.screen_lines\n    }\n\n    #[inline]\n    fn total_lines(&self) -> usize {\n        self.screen_lines()\n    }\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_13", + "original_index": 13, + "content": "#[derive(Default, Clone, Debug, PartialEq, Eq)]\npub struct DisplayUpdate {\n    pub dirty: bool,\n\n    dimensions: Option<PhysicalSize<u32>>,\n    cursor_dirty: bool,\n    font: Option<Font>,\n}\n\nimpl DisplayUpdate {\n    pub fn dimensions(&self) -> Option<PhysicalSize<u32>> {\n        self.dimensions\n    }\n\n    pub fn font(&self) -> Option<&Font> {\n        self.font.as_ref()\n    }\n\n    pub fn cursor_dirty(&self) -> bool {\n        self.cursor_dirty\n    }\n\n    pub fn set_dimensions(&mut self, dimensions: PhysicalSize<u32>) {\n        self.dimensions = Some(dimensions);\n        self.dirty = true;\n    }\n\n    pub fn set_font(&mut self, font: Font) {\n        self.font = Some(font);\n        self.dirty = true;\n    }\n\n    pub fn set_cursor_dirty(&mut self) {\n        self.cursor_dirty = true;\n        self.dirty = true;\n    }\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_14", + "original_index": 14, + "content": "/// The display wraps a window, font rasterizer, and GPU renderer.\npub struct Display {\n    pub window: Window,\n\n    pub size_info: SizeInfo,\n\n    /// Hint highlighted by the mouse.\n    pub highlighted_hint: Option<HintMatch>,\n\n    /// Hint highlighted by the vi mode cursor.\n    pub vi_highlighted_hint: Option<HintMatch>,\n\n    pub raw_window_handle: RawWindowHandle,\n\n    /// UI cursor visibility for blinking.\n    pub cursor_hidden: bool,\n\n    pub visual_bell: VisualBell,\n\n    /// Mapped RGB values for each terminal color.\n    pub colors: List,\n\n    /// State of the keyboard hints.\n    pub hint_state: HintState,\n\n    /// Unprocessed display updates.\n    pub pending_update: DisplayUpdate,\n\n    /// The renderer update that takes place only once before the actual rendering.\n    pub pending_renderer_update: Option<RendererUpdate>,\n\n" + }, + { + "chunk_id": "doc_70_chunk_15", + "original_index": 15, + "content": "    /// The ime on the given display.\n    pub ime: Ime,\n\n    /// The state of the timer for frame scheduling.\n    pub frame_timer: FrameTimer,\n\n    /// Damage tracker for the given display.\n    pub damage_tracker: DamageTracker,\n\n    /// Font size used by the window.\n    pub font_size: FontSize,\n\n    // Mouse point position when highlighting hints.\n    hint_mouse_point: Option<Point>,\n\n    renderer: ManuallyDrop<Renderer>,\n\n    surface: ManuallyDrop<Surface<WindowSurface>>,\n\n    context: ManuallyDrop<Replaceable<PossiblyCurrentContext>>,\n\n    glyph_cache: GlyphCache,\n    meter: Meter,\n}\n\nimpl Display {\n    pub fn new(\n        window: Window,\n        gl_context: NotCurrentContext,\n        config: &UiConfig,\n        _tabbed: bool,\n    ) -> Result<Display, Error> {\n        let raw_window_handle = window.raw_window_handle();\n\n        let scale_factor = window.scale_factor as f32;\n        let rasterizer = Rasterizer::new()?;\n\n" + }, + { + "chunk_id": "doc_70_chunk_16", + "original_index": 16, + "content": "    let font_size = config.font.size().scale(scale_factor);\n    debug!(\"Loading \\\"{}\\\" font\", &config.font.normal().family);\n    let font = config.font.clone().with_size(font_size);\n    let mut glyph_cache = GlyphCache::new(rasterizer, &font)?;\n\n    let metrics = glyph_cache.font_metrics();\n    let (cell_width, cell_height) = compute_cell_size(config, &metrics);\n\n    // Resize the window to account for the user configured size.\n    if let Some(dimensions) = config.window.dimensions() {\n        let size = window_size(config, dimensions, cell_width, cell_height, scale_factor);\n        window.request_inner_size(size);\n    }\n\n" + }, + { + "chunk_id": "doc_70_chunk_17", + 
"original_index": 17, + "content": " // Create the GL surface to draw into.\n let surface = renderer::platform::create_gl_surface(\n &gl_context,\n window.inner_size(),\n window.raw_window_handle(),\n )?;\n\n // Make the context current.\n let context = gl_context.make_current(&surface)?;\n\n // Create renderer.\n let mut renderer = Renderer::new(&context, config.debug.renderer)?;\n\n" + }, + { + "chunk_id": "doc_70_chunk_18", + "original_index": 18, + "content": " // Load font common glyphs to accelerate rendering.\n debug!(\"Filling glyph cache with common glyphs\");\n renderer.with_loader(|mut api| {\n glyph_cache.reset_glyph_cache(&mut api);\n });\n\n let padding = config.window.padding(window.scale_factor as f32);\n let viewport_size = window.inner_size();\n\n // Create new size with at least one column and row.\n let size_info = SizeInfo::new(\n viewport_size.width as f32,\n viewport_size.height as f32,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding && config.window.dimensions().is_none(),\n );\n\n" + }, + { + "chunk_id": "doc_70_chunk_19", + "original_index": 19, + "content": " info!(\"Cell size: {} x {}\", cell_width, cell_height);\n info!(\"Padding: {} x {}\", size_info.padding_x(), size_info.padding_y());\n info!(\"Width: {}, Height: {}\", size_info.width(), size_info.height());\n\n // Update OpenGL projection.\n renderer.resize(&size_info);\n\n // Clear screen.\n let background_color = config.colors.primary.background;\n renderer.clear(background_color, config.window_opacity());\n\n // Disable shadows for transparent windows on macOS.\n #[cfg(target_os = \"macos\")]\n window.set_has_shadow(config.window_opacity() >= 1.0);\n\n let is_wayland = matches!(raw_window_handle, RawWindowHandle::Wayland(_));\n\n" + }, + { + "chunk_id": "doc_70_chunk_20", + "original_index": 20, + "content": " // On Wayland we can safely ignore this call, since the window isn't visible until you\n // actually draw something into it and commit those changes.\n if !is_wayland {\n surface.swap_buffers(&context).expect(\"failed to swap buffers.\");\n renderer.finish();\n }\n\n // Set resize increments for the newly created window.\n if config.window.resize_increments {\n window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n window.set_visible(true);\n\n" + }, + { + "chunk_id": "doc_70_chunk_21", + "original_index": 21, + "content": " #[allow(clippy::single_match)]\n #[cfg(not(windows))]\n if !_tabbed {\n match config.window.startup_mode {\n #[cfg(target_os = \"macos\")]\n StartupMode::SimpleFullscreen => window.set_simple_fullscreen(true),\n StartupMode::Maximized if !is_wayland => window.set_maximized(true),\n _ => (),\n }\n }\n\n let hint_state = HintState::new(config.hints.alphabet());\n\n let mut damage_tracker = DamageTracker::new(size_info.screen_lines(), size_info.columns());\n damage_tracker.debug = config.debug.highlight_damage;\n\n // Disable vsync.\n if let Err(err) = surface.set_swap_interval(&context, SwapInterval::DontWait) {\n info!(\"Failed to disable vsync: {}\", err);\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_22", + "original_index": 22, + "content": " Ok(Self {\n context: ManuallyDrop::new(Replaceable::new(context)),\n visual_bell: VisualBell::from(&config.bell),\n renderer: ManuallyDrop::new(renderer),\n surface: ManuallyDrop::new(surface),\n colors: List::from(&config.colors),\n frame_timer: FrameTimer::new(),\n raw_window_handle,\n damage_tracker,\n glyph_cache,\n hint_state,\n size_info,\n font_size,\n window,\n 
pending_renderer_update: Default::default(),\n vi_highlighted_hint: Default::default(),\n highlighted_hint: Default::default(),\n hint_mouse_point: Default::default(),\n pending_update: Default::default(),\n cursor_hidden: Default::default(),\n meter: Default::default(),\n ime: Default::default(),\n })\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_23", + "original_index": 23, + "content": " #[inline]\n pub fn gl_context(&self) -> &PossiblyCurrentContext {\n self.context.get()\n }\n\n pub fn make_not_current(&mut self) {\n if self.context.get().is_current() {\n self.context.replace_with(|context| {\n context\n .make_not_current()\n .expect(\"failed to disable context\")\n .treat_as_possibly_current()\n });\n }\n }\n\n pub fn make_current(&self) {\n if !self.context.get().is_current() {\n self.context.make_current(&self.surface).expect(\"failed to make context current\")\n }\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_24", + "original_index": 24, + "content": " fn swap_buffers(&self) {\n #[allow(clippy::single_match)]\n let res = match (self.surface.deref(), &self.context.get()) {\n #[cfg(not(any(target_os = \"macos\", windows)))]\n (Surface::Egl(surface), PossiblyCurrentContext::Egl(context))\n if matches!(self.raw_window_handle, RawWindowHandle::Wayland(_))\n && !self.damage_tracker.debug =>\n {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n surface.swap_buffers_with_damage(context, &damage)\n },\n (surface, context) => surface.swap_buffers(context),\n };\n if let Err(err) = res {\n debug!(\"error calling swap_buffers: {}\", err);\n }\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_25", + "original_index": 25, + "content": " /// Update font size and cell dimensions.\n ///\n /// This will return a tuple of the cell width and height.\n fn update_font_size(\n glyph_cache: &mut GlyphCache,\n config: &UiConfig,\n font: &Font,\n ) -> (f32, f32) {\n let _ = glyph_cache.update_font_size(font);\n\n // Compute new cell sizes.\n compute_cell_size(config, &glyph_cache.font_metrics())\n }\n\n /// Reset glyph cache.\n fn reset_glyph_cache(&mut self) {\n let cache = &mut self.glyph_cache;\n self.renderer.with_loader(|mut api| {\n cache.reset_glyph_cache(&mut api);\n });\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_26", + "original_index": 26, + "content": " // XXX: this function must not call to any `OpenGL` related tasks. 
Renderer updates are\n // performed in [`Self::process_renderer_update`] right before drawing.\n //\n /// Process update events.\n pub fn handle_update(\n &mut self,\n terminal: &mut Term,\n pty_resize_handle: &mut dyn OnResize,\n message_buffer: &MessageBuffer,\n search_state: &mut SearchState,\n config: &UiConfig,\n ) where\n T: EventListener,\n {\n let pending_update = mem::take(&mut self.pending_update);\n\n" + }, + { + "chunk_id": "doc_70_chunk_27", + "original_index": 27, + "content": " let (mut cell_width, mut cell_height) =\n (self.size_info.cell_width(), self.size_info.cell_height());\n\n if pending_update.font().is_some() || pending_update.cursor_dirty() {\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.clear_font_cache = true\n }\n\n // Update font size and cell dimensions.\n if let Some(font) = pending_update.font() {\n let cell_dimensions = Self::update_font_size(&mut self.glyph_cache, config, font);\n cell_width = cell_dimensions.0;\n cell_height = cell_dimensions.1;\n\n" + }, + { + "chunk_id": "doc_70_chunk_28", + "original_index": 28, + "content": " info!(\"Cell size: {} x {}\", cell_width, cell_height);\n\n // Mark entire terminal as damaged since glyph size could change without cell size\n // changes.\n self.damage_tracker.frame().mark_fully_damaged();\n }\n\n let (mut width, mut height) = (self.size_info.width(), self.size_info.height());\n if let Some(dimensions) = pending_update.dimensions() {\n width = dimensions.width as f32;\n height = dimensions.height as f32;\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_29", + "original_index": 29, + "content": " let padding = config.window.padding(self.window.scale_factor as f32);\n\n let mut new_size = SizeInfo::new(\n width,\n height,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding,\n );\n\n // Update number of column/lines in the viewport.\n let search_active = search_state.history_index.is_some();\n let message_bar_lines = message_buffer.message().map_or(0, |m| m.text(&new_size).len());\n let search_lines = usize::from(search_active);\n new_size.reserve_lines(message_bar_lines + search_lines);\n\n" + }, + { + "chunk_id": "doc_70_chunk_30", + "original_index": 30, + "content": " // Update resize increments.\n if config.window.resize_increments {\n self.window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n // Resize when terminal when its dimensions have changed.\n if self.size_info.screen_lines() != new_size.screen_lines\n || self.size_info.columns() != new_size.columns()\n {\n // Resize PTY.\n pty_resize_handle.on_resize(new_size.into());\n\n" + }, + { + "chunk_id": "doc_70_chunk_31", + "original_index": 31, + "content": " // Resize terminal.\n terminal.resize(new_size);\n\n // Resize damage tracking.\n self.damage_tracker.resize(new_size.screen_lines(), new_size.columns());\n }\n\n // Check if dimensions have changed.\n if new_size != self.size_info {\n // Queue renderer update.\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.resize = true;\n\n // Clear focused search match.\n search_state.clear_focused_match();\n }\n self.size_info = new_size;\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_32", + "original_index": 32, + "content": " // NOTE: Renderer updates are split off, since platforms like Wayland require resize and other\n // OpenGL operations to be performed right before rendering. 
Otherwise they could lock the\n // back buffer and render with the previous state. This also solves flickering during resizes.\n //\n /// Update the state of the renderer.\n pub fn process_renderer_update(&mut self) {\n let renderer_update = match self.pending_renderer_update.take() {\n Some(renderer_update) => renderer_update,\n _ => return,\n };\n\n" + }, + { + "chunk_id": "doc_70_chunk_33", + "original_index": 33, + "content": " // Resize renderer.\n if renderer_update.resize {\n let width = NonZeroU32::new(self.size_info.width() as u32).unwrap();\n let height = NonZeroU32::new(self.size_info.height() as u32).unwrap();\n self.surface.resize(&self.context, width, height);\n }\n\n // Ensure we're modifying the correct OpenGL context.\n self.make_current();\n\n if renderer_update.clear_font_cache {\n self.reset_glyph_cache();\n }\n\n self.renderer.resize(&self.size_info);\n\n info!(\"Padding: {} x {}\", self.size_info.padding_x(), self.size_info.padding_y());\n info!(\"Width: {}, Height: {}\", self.size_info.width(), self.size_info.height());\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_34", + "original_index": 34, + "content": " /// Draw the screen.\n ///\n /// A reference to Term whose state is being drawn must be provided.\n ///\n /// This call may block if vsync is enabled.\n pub fn draw(\n &mut self,\n mut terminal: MutexGuard<'_, Term>,\n scheduler: &mut Scheduler,\n message_buffer: &MessageBuffer,\n config: &UiConfig,\n search_state: &mut SearchState,\n ) {\n // Collect renderable content before the terminal is dropped.\n let mut content = RenderableContent::new(config, self, &terminal, search_state);\n" + }, + { + "chunk_id": "doc_70_chunk_35", + "original_index": 35, + "content": " let mut grid_cells = Vec::new();\n for cell in &mut content {\n grid_cells.push(cell);\n }\n let selection_range = content.selection_range();\n let foreground_color = content.color(NamedColor::Foreground as usize);\n let background_color = content.color(NamedColor::Background as usize);\n let display_offset = content.display_offset();\n let cursor = content.cursor();\n\n let cursor_point = terminal.grid().cursor.point;\n let total_lines = terminal.grid().total_lines();\n let metrics = self.glyph_cache.font_metrics();\n let size_info = self.size_info;\n\n let vi_mode = terminal.mode().contains(TermMode::VI);\n let vi_cursor_point = if vi_mode { Some(terminal.vi_mode_cursor.point) } else { None };\n\n" + }, + { + "chunk_id": "doc_70_chunk_36", + "original_index": 36, + "content": " // Add damage from the terminal.\n if self.collect_damage() {\n match terminal.damage() {\n TermDamage::Full => self.damage_tracker.frame().mark_fully_damaged(),\n TermDamage::Partial(damaged_lines) => {\n for damage in damaged_lines {\n self.damage_tracker.frame().damage_line(damage);\n }\n },\n }\n terminal.reset_damage();\n }\n\n // Drop terminal as early as possible to free lock.\n drop(terminal);\n\n // Add damage from alacritty's UI elements overlapping terminal.\n if self.collect_damage() {\n let requires_full_damage = self.visual_bell.intensity() != 0.\n || self.hint_state.active()\n || search_state.regex().is_some();\n\n" + }, + { + "chunk_id": "doc_70_chunk_37", + "original_index": 37, + "content": " if requires_full_damage {\n self.damage_tracker.frame().mark_fully_damaged();\n self.damage_tracker.next_frame().mark_fully_damaged();\n }\n\n let vi_cursor_viewport_point =\n vi_cursor_point.and_then(|cursor| point_to_viewport(display_offset, cursor));\n\n 
self.damage_tracker.damage_vi_cursor(vi_cursor_viewport_point);\n self.damage_tracker.damage_selection(selection_range, display_offset);\n }\n\n // Make sure this window's OpenGL context is active.\n self.make_current();\n\n self.renderer.clear(background_color, config.window_opacity());\n let mut lines = RenderLines::new();\n\n // Optimize loop hint comparator.\n let has_highlighted_hint =\n self.highlighted_hint.is_some() || self.vi_highlighted_hint.is_some();\n\n" + }, + { + "chunk_id": "doc_70_chunk_38", + "original_index": 38, + "content": " // Draw grid.\n {\n let _sampler = self.meter.sampler();\n\n // Ensure macOS hasn't reset our viewport.\n #[cfg(target_os = \"macos\")]\n self.renderer.set_viewport(&size_info);\n\n let glyph_cache = &mut self.glyph_cache;\n let highlighted_hint = &self.highlighted_hint;\n let vi_highlighted_hint = &self.vi_highlighted_hint;\n let damage_tracker = &mut self.damage_tracker;\n\n self.renderer.draw_cells(\n &size_info,\n glyph_cache,\n grid_cells.into_iter().map(|mut cell| {\n // Underline hints hovered by mouse or vi mode cursor.\n let point = term::viewport_to_point(display_offset, cell.point);\n\n" + }, + { + "chunk_id": "doc_70_chunk_39", + "original_index": 39, + "content": " if has_highlighted_hint {\n let hyperlink =\n cell.extra.as_ref().and_then(|extra| extra.hyperlink.as_ref());\n if highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n || vi_highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n {\n cell.flags.insert(Flags::UNDERLINE);\n // Damage hints for the current and next frames.\n damage_tracker.frame().damage_point(cell.point);\n damage_tracker.next_frame().damage_point(cell.point);\n }\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_40", + "original_index": 40, + "content": " // Update underline/strikeout.\n lines.update(&cell);\n\n cell\n }),\n );\n }\n\n let mut rects = lines.rects(&metrics, &size_info);\n\n if let Some(vi_cursor_point) = vi_cursor_point {\n // Indicate vi mode by showing the cursor's position in the top right corner.\n let line = (-vi_cursor_point.line.0 + size_info.bottommost_line().0) as usize;\n let obstructed_column = Some(vi_cursor_point)\n .filter(|point| point.line == -(display_offset as i32))\n .map(|point| point.column);\n self.draw_line_indicator(config, total_lines, obstructed_column, line);\n } else if search_state.regex().is_some() {\n // Show current display offset in vi-less search to indicate match position.\n self.draw_line_indicator(config, total_lines, None, display_offset);\n };\n\n" + }, + { + "chunk_id": "doc_70_chunk_41", + "original_index": 41, + "content": " // Draw cursor.\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n\n // Push visual bell after url/underline/strikeout rects.\n let visual_bell_intensity = self.visual_bell.intensity();\n if visual_bell_intensity != 0. 
{\n let visual_bell_rect = RenderRect::new(\n 0.,\n 0.,\n size_info.width(),\n size_info.height(),\n config.bell.color,\n visual_bell_intensity as f32,\n );\n rects.push(visual_bell_rect);\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_42", + "original_index": 42, + "content": " // Handle IME positioning and search bar rendering.\n let ime_position = match search_state.regex() {\n Some(regex) => {\n let search_label = match search_state.direction() {\n Direction::Right => FORWARD_SEARCH_LABEL,\n Direction::Left => BACKWARD_SEARCH_LABEL,\n };\n\n let search_text = Self::format_search(regex, search_label, size_info.columns());\n\n // Render the search bar.\n self.draw_search(config, &search_text);\n\n // Draw search bar cursor.\n let line = size_info.screen_lines();\n let column = Column(search_text.chars().count() - 1);\n\n" + }, + { + "chunk_id": "doc_70_chunk_43", + "original_index": 43, + "content": " // Add cursor to search bar if IME is not active.\n if self.ime.preedit().is_none() {\n let fg = config.colors.footer_bar_foreground();\n let shape = CursorShape::Underline;\n let cursor = RenderableCursor::new(Point::new(line, column), shape, fg, false);\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n }\n\n Some(Point::new(line, column))\n },\n None => {\n let num_lines = self.size_info.screen_lines();\n term::point_to_viewport(display_offset, cursor_point)\n .filter(|point| point.line < num_lines)\n },\n };\n\n" + }, + { + "chunk_id": "doc_70_chunk_44", + "original_index": 44, + "content": " // Handle IME.\n if self.ime.is_enabled() {\n if let Some(point) = ime_position {\n let (fg, bg) = if search_state.regex().is_some() {\n (config.colors.footer_bar_foreground(), config.colors.footer_bar_background())\n } else {\n (foreground_color, background_color)\n };\n\n self.draw_ime_preview(point, fg, bg, &mut rects, config);\n }\n }\n\n if let Some(message) = message_buffer.message() {\n let search_offset = usize::from(search_state.regex().is_some());\n let text = message.text(&size_info);\n\n" + }, + { + "chunk_id": "doc_70_chunk_45", + "original_index": 45, + "content": " // Create a new rectangle for the background.\n let start_line = size_info.screen_lines() + search_offset;\n let y = size_info.cell_height().mul_add(start_line as f32, size_info.padding_y());\n\n let bg = match message.ty() {\n MessageType::Error => config.colors.normal.red,\n MessageType::Warning => config.colors.normal.yellow,\n };\n\n" + }, + { + "chunk_id": "doc_70_chunk_46", + "original_index": 46, + "content": " let x = 0;\n let width = size_info.width() as i32;\n let height = (size_info.height() - y) as i32;\n let message_bar_rect =\n RenderRect::new(x as f32, y, width as f32, height as f32, bg, 1.);\n\n // Push message_bar in the end, so it'll be above all other content.\n rects.push(message_bar_rect);\n\n // Always damage message bar, since it could have messages of the same size in it.\n self.damage_tracker.frame().add_viewport_rect(&size_info, x, y as i32, width, height);\n\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n\n" + }, + { + "chunk_id": "doc_70_chunk_47", + "original_index": 47, + "content": " // Relay messages to the user.\n let glyph_cache = &mut self.glyph_cache;\n let fg = config.colors.primary.background;\n for (i, message_text) in text.iter().enumerate() {\n let point = Point::new(start_line + i, Column(0));\n self.renderer.draw_string(\n point,\n fg,\n bg,\n message_text.chars(),\n &size_info,\n glyph_cache,\n );\n }\n } else {\n // Draw 
rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_48", + "original_index": 48, + "content": " self.draw_render_timer(config);\n\n // Draw hyperlink uri preview.\n if has_highlighted_hint {\n let cursor_point = vi_cursor_point.or(Some(cursor_point));\n self.draw_hyperlink_preview(config, cursor_point, display_offset);\n }\n\n // Notify winit that we're about to present.\n self.window.pre_present_notify();\n\n // Highlight damage for debugging.\n if self.damage_tracker.debug {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n let mut rects = Vec::with_capacity(damage.len());\n self.highlight_damage(&mut rects);\n self.renderer.draw_rects(&self.size_info, &metrics, rects);\n }\n\n // Clearing debug highlights from the previous frame requires full redraw.\n self.swap_buffers();\n\n" + }, + { + "chunk_id": "doc_70_chunk_49", + "original_index": 49, + "content": " if matches!(self.raw_window_handle, RawWindowHandle::Xcb(_) | RawWindowHandle::Xlib(_)) {\n // On X11 `swap_buffers` does not block for vsync. However the next OpenGl command\n // will block to synchronize (this is `glClear` in Alacritty), which causes a\n // permanent one frame delay.\n self.renderer.finish();\n }\n\n // XXX: Request the new frame after swapping buffers, so the\n // time to finish OpenGL operations is accounted for in the timeout.\n if !matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) {\n self.request_frame(scheduler);\n }\n\n self.damage_tracker.swap_damage();\n }\n\n /// Update to a new configuration.\n pub fn update_config(&mut self, config: &UiConfig) {\n self.damage_tracker.debug = config.debug.highlight_damage;\n self.visual_bell.update_config(&config.bell);\n self.colors = List::from(&config.colors);\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_50", + "original_index": 50, + "content": " /// Update the mouse/vi mode cursor hint highlighting.\n ///\n /// This will return whether the highlighted hints changed.\n pub fn update_highlighted_hints(\n &mut self,\n term: &Term,\n config: &UiConfig,\n mouse: &Mouse,\n modifiers: ModifiersState,\n ) -> bool {\n // Update vi mode cursor hint.\n let vi_highlighted_hint = if term.mode().contains(TermMode::VI) {\n let mods = ModifiersState::all();\n let point = term.vi_mode_cursor.point;\n hint::highlighted_at(term, config, point, mods)\n } else {\n None\n };\n let mut dirty = vi_highlighted_hint != self.vi_highlighted_hint;\n self.vi_highlighted_hint = vi_highlighted_hint;\n\n" + }, + { + "chunk_id": "doc_70_chunk_51", + "original_index": 51, + "content": " // Abort if mouse highlighting conditions are not met.\n if !mouse.inside_text_area || !term.selection.as_ref().map_or(true, Selection::is_empty) {\n dirty |= self.highlighted_hint.is_some();\n self.highlighted_hint = None;\n return dirty;\n }\n\n // Find highlighted hint at mouse position.\n let point = mouse.point(&self.size_info, term.grid().display_offset());\n let highlighted_hint = hint::highlighted_at(term, config, point, modifiers);\n\n" + }, + { + "chunk_id": "doc_70_chunk_52", + "original_index": 52, + "content": " // Update cursor shape.\n if highlighted_hint.is_some() {\n // If mouse changed the line, we should update the hyperlink preview, since the\n // highlighted hint could be disrupted by the old preview.\n dirty = self.hint_mouse_point.map_or(false, |p| p.line != point.line);\n self.hint_mouse_point = Some(point);\n self.window.set_mouse_cursor(CursorIcon::Pointer);\n } else if 
self.highlighted_hint.is_some() {\n self.hint_mouse_point = None;\n if term.mode().intersects(TermMode::MOUSE_MODE) && !term.mode().contains(TermMode::VI) {\n self.window.set_mouse_cursor(CursorIcon::Default);\n } else {\n self.window.set_mouse_cursor(CursorIcon::Text);\n }\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_53", + "original_index": 53, + "content": " dirty |= self.highlighted_hint != highlighted_hint;\n self.highlighted_hint = highlighted_hint;\n\n dirty\n }\n\n #[inline(never)]\n fn draw_ime_preview(\n &mut self,\n point: Point,\n fg: Rgb,\n bg: Rgb,\n rects: &mut Vec,\n config: &UiConfig,\n ) {\n let preedit = match self.ime.preedit() {\n Some(preedit) => preedit,\n None => {\n // In case we don't have preedit, just set the popup point.\n self.window.update_ime_position(point, &self.size_info);\n return;\n },\n };\n\n let num_cols = self.size_info.columns();\n\n" + }, + { + "chunk_id": "doc_70_chunk_54", + "original_index": 54, + "content": " // Get the visible preedit.\n let visible_text: String = match (preedit.cursor_byte_offset, preedit.cursor_end_offset) {\n (Some(byte_offset), Some(end_offset)) if end_offset > num_cols => StrShortener::new(\n &preedit.text[byte_offset..],\n num_cols,\n ShortenDirection::Right,\n Some(SHORTENER),\n ),\n _ => {\n StrShortener::new(&preedit.text, num_cols, ShortenDirection::Left, Some(SHORTENER))\n },\n }\n .collect();\n\n" + }, + { + "chunk_id": "doc_70_chunk_55", + "original_index": 55, + "content": " let visible_len = visible_text.chars().count();\n\n let end = cmp::min(point.column.0 + visible_len, num_cols);\n let start = end.saturating_sub(visible_len);\n\n let start = Point::new(point.line, Column(start));\n let end = Point::new(point.line, Column(end - 1));\n\n let glyph_cache = &mut self.glyph_cache;\n let metrics = glyph_cache.font_metrics();\n\n self.renderer.draw_string(\n start,\n fg,\n bg,\n visible_text.chars(),\n &self.size_info,\n glyph_cache,\n );\n\n // Damage preedit inside the terminal viewport.\n if self.collect_damage() && point.line < self.size_info.screen_lines() {\n let damage = LineDamageBounds::new(start.line, 0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_56", + "original_index": 56, + "content": " // Add underline for preedit text.\n let underline = RenderLine { start, end, color: fg };\n rects.extend(underline.rects(Flags::UNDERLINE, &metrics, &self.size_info));\n\n let ime_popup_point = match preedit.cursor_end_offset {\n Some(cursor_end_offset) if cursor_end_offset != 0 => {\n let is_wide = preedit.text[preedit.cursor_byte_offset.unwrap_or_default()..]\n .chars()\n .next()\n .map(|ch| ch.width() == Some(2))\n .unwrap_or_default();\n\n" + }, + { + "chunk_id": "doc_70_chunk_57", + "original_index": 57, + "content": " let cursor_column = Column(\n (end.column.0 as isize - cursor_end_offset as isize + 1).max(0) as usize,\n );\n let cursor_point = Point::new(point.line, cursor_column);\n let cursor =\n RenderableCursor::new(cursor_point, CursorShape::HollowBlock, fg, is_wide);\n rects.extend(cursor.rects(&self.size_info, config.cursor.thickness()));\n cursor_point\n },\n _ => end,\n };\n\n" + }, + { + "chunk_id": "doc_70_chunk_58", + "original_index": 58, + "content": " self.window.update_ime_position(ime_popup_point, &self.size_info);\n }\n\n /// Format search regex to account for the cursor and fullwidth characters.\n fn format_search(search_regex: &str, search_label: &str, max_width: usize) 
-> String {\n let label_len = search_label.len();\n\n // Skip `search_regex` formatting if only label is visible.\n if label_len > max_width {\n return search_label[..max_width].to_owned();\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_59", + "original_index": 59, + "content": " // The search string consists of `search_label` + `search_regex` + `cursor`.\n let mut bar_text = String::from(search_label);\n bar_text.extend(StrShortener::new(\n search_regex,\n max_width.wrapping_sub(label_len + 1),\n ShortenDirection::Left,\n Some(SHORTENER),\n ));\n\n // Add place for cursor.\n bar_text.push(' ');\n\n bar_text\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_60", + "original_index": 60, + "content": " /// Draw preview for the currently highlighted `Hyperlink`.\n #[inline(never)]\n fn draw_hyperlink_preview(\n &mut self,\n config: &UiConfig,\n cursor_point: Option,\n display_offset: usize,\n ) {\n let num_cols = self.size_info.columns();\n let uris: Vec<_> = self\n .highlighted_hint\n .iter()\n .chain(&self.vi_highlighted_hint)\n .filter_map(|hint| hint.hyperlink().map(|hyperlink| hyperlink.uri()))\n .map(|uri| StrShortener::new(uri, num_cols, ShortenDirection::Right, Some(SHORTENER)))\n .collect();\n\n" + }, + { + "chunk_id": "doc_70_chunk_61", + "original_index": 61, + "content": " if uris.is_empty() {\n return;\n }\n\n // The maximum amount of protected lines including the ones we'll show preview on.\n let max_protected_lines = uris.len() * 2;\n\n // Lines we shouldn't show preview on, because it'll obscure the highlighted hint.\n let mut protected_lines = Vec::with_capacity(max_protected_lines);\n if self.size_info.screen_lines() > max_protected_lines {\n // Prefer to show preview even when it'll likely obscure the highlighted hint, when\n // there's no place left for it.\n protected_lines.push(self.hint_mouse_point.map(|point| point.line));\n protected_lines.push(cursor_point.map(|point| point.line));\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_62", + "original_index": 62, + "content": " // Find the line in viewport we can draw preview on without obscuring protected lines.\n let viewport_bottom = self.size_info.bottommost_line() - Line(display_offset as i32);\n let viewport_top = viewport_bottom - (self.size_info.screen_lines() - 1);\n let uri_lines = (viewport_top.0..=viewport_bottom.0)\n .rev()\n .map(|line| Some(Line(line)))\n .filter_map(|line| {\n if protected_lines.contains(&line) {\n None\n } else {\n protected_lines.push(line);\n line\n }\n })\n .take(uris.len())\n .flat_map(|line| term::point_to_viewport(display_offset, Point::new(line, Column(0))));\n\n" + }, + { + "chunk_id": "doc_70_chunk_63", + "original_index": 63, + "content": " let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n for (uri, point) in uris.into_iter().zip(uri_lines) {\n // Damage the uri preview.\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n\n // Damage the uri preview for the next frame as well.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n self.renderer.draw_string(point, fg, bg, uri, &self.size_info, &mut self.glyph_cache);\n }\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_64", + "original_index": 64, + "content": " /// Draw current search regex.\n #[inline(never)]\n fn draw_search(&mut self, config: &UiConfig, text: &str) {\n // Assure text length is at least num_cols.\n let num_cols = self.size_info.columns();\n let text = 
format!(\"{:<1$}\", text, num_cols);\n\n let point = Point::new(self.size_info.screen_lines(), Column(0));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n\n" + }, + { + "chunk_id": "doc_70_chunk_65", + "original_index": 65, + "content": " self.renderer.draw_string(\n point,\n fg,\n bg,\n text.chars(),\n &self.size_info,\n &mut self.glyph_cache,\n );\n }\n\n /// Draw render timer.\n #[inline(never)]\n fn draw_render_timer(&mut self, config: &UiConfig) {\n if !config.debug.render_timer {\n return;\n }\n\n let timing = format!(\"{:.3} usec\", self.meter.average());\n let point = Point::new(self.size_info.screen_lines().saturating_sub(2), Column(0));\n let fg = config.colors.primary.background;\n let bg = config.colors.normal.red;\n\n" + }, + { + "chunk_id": "doc_70_chunk_66", + "original_index": 66, + "content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, timing.len());\n self.damage_tracker.frame().damage_line(damage);\n // Damage the render timer for the next frame.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, timing.chars(), &self.size_info, glyph_cache);\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_67", + "original_index": 67, + "content": " /// Draw an indicator for the position of a line in history.\n #[inline(never)]\n fn draw_line_indicator(\n &mut self,\n config: &UiConfig,\n total_lines: usize,\n obstructed_column: Option<Column>,\n line: usize,\n ) {\n let columns = self.size_info.columns();\n let text = format!(\"[{}/{}]\", line, total_lines - 1);\n let column = Column(self.size_info.columns().saturating_sub(text.len()));\n let point = Point::new(0, column);\n\n" + }, + { + "chunk_id": "doc_70_chunk_68", + "original_index": 68, + "content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, columns - 1);\n self.damage_tracker.frame().damage_line(damage);\n // Damage it on the next frame in case it goes away.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let colors = &config.colors;\n let fg = colors.line_indicator.foreground.unwrap_or(colors.primary.background);\n let bg = colors.line_indicator.background.unwrap_or(colors.primary.foreground);\n\n" + }, + { + "chunk_id": "doc_70_chunk_69", + "original_index": 69, + "content": " // Do not render anything if it would obscure the vi mode cursor.\n if obstructed_column.map_or(true, |obstructed_column| obstructed_column < column) {\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, text.chars(), &self.size_info, glyph_cache);\n }\n }\n\n /// Returns `true` if damage information should be collected, `false` otherwise.\n #[inline]\n fn collect_damage(&self) -> bool {\n matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) || self.damage_tracker.debug\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_70", + "original_index": 70, + "content": " /// Highlight damaged rects.\n ///\n /// This function is for debug purposes only.\n fn highlight_damage(&self, render_rects: &mut Vec<RenderRect>) {\n for damage_rect in &self.damage_tracker.shape_frame_damage(self.size_info.into()) {\n let x = damage_rect.x as f32;\n let height = damage_rect.height as f32;\n let width = damage_rect.width as f32;\n let y = damage_y_to_viewport_y(&self.size_info, damage_rect) as f32;\n let render_rect = RenderRect::new(x, y, width, height, DAMAGE_RECT_COLOR, 0.5);\n\n" + }, + { + 
"chunk_id": "doc_70_chunk_71", + "original_index": 71, + "content": " render_rects.push(render_rect);\n }\n }\n\n /// Request a new frame for a window on Wayland.\n fn request_frame(&mut self, scheduler: &mut Scheduler) {\n // Mark that we've used a frame.\n self.window.has_frame = false;\n\n // Get the display vblank interval.\n let monitor_vblank_interval = 1_000_000.\n / self\n .window\n .current_monitor()\n .and_then(|monitor| monitor.refresh_rate_millihertz())\n .unwrap_or(60_000) as f64;\n\n" + }, + { + "chunk_id": "doc_70_chunk_72", + "original_index": 72, + "content": " // Now convert it to micro seconds.\n let monitor_vblank_interval =\n Duration::from_micros((1000. * monitor_vblank_interval) as u64);\n\n let swap_timeout = self.frame_timer.compute_timeout(monitor_vblank_interval);\n\n let window_id = self.window.id();\n let timer_id = TimerId::new(Topic::Frame, window_id);\n let event = Event::new(EventType::Frame, window_id);\n\n scheduler.schedule(event, swap_timeout, false, timer_id);\n }\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_73", + "original_index": 73, + "content": "impl Drop for Display {\n fn drop(&mut self) {\n // Switch OpenGL context before dropping, otherwise objects (like programs) from other\n // contexts might be deleted when dropping renderer.\n self.make_current();\n unsafe {\n ManuallyDrop::drop(&mut self.renderer);\n ManuallyDrop::drop(&mut self.context);\n ManuallyDrop::drop(&mut self.surface);\n }\n }\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_74", + "original_index": 74, + "content": "/// Input method state.\n#[derive(Debug, Default)]\npub struct Ime {\n /// Whether the IME is enabled.\n enabled: bool,\n\n /// Current IME preedit.\n preedit: Option,\n}\n\nimpl Ime {\n #[inline]\n pub fn set_enabled(&mut self, is_enabled: bool) {\n if is_enabled {\n self.enabled = is_enabled\n } else {\n // Clear state when disabling IME.\n *self = Default::default();\n }\n }\n\n #[inline]\n pub fn is_enabled(&self) -> bool {\n self.enabled\n }\n\n #[inline]\n pub fn set_preedit(&mut self, preedit: Option) {\n self.preedit = preedit;\n }\n\n #[inline]\n pub fn preedit(&self) -> Option<&Preedit> {\n self.preedit.as_ref()\n }\n}\n\n#[derive(Debug, Default, PartialEq, Eq)]\npub struct Preedit {\n /// The preedit text.\n text: String,\n\n" + }, + { + "chunk_id": "doc_70_chunk_75", + "original_index": 75, + "content": " /// Byte offset for cursor start into the preedit text.\n ///\n /// `None` means that the cursor is invisible.\n cursor_byte_offset: Option,\n\n /// The cursor offset from the end of the preedit in char width.\n cursor_end_offset: Option,\n}\n\nimpl Preedit {\n pub fn new(text: String, cursor_byte_offset: Option) -> Self {\n let cursor_end_offset = if let Some(byte_offset) = cursor_byte_offset {\n // Convert byte offset into char offset.\n let cursor_end_offset =\n text[byte_offset..].chars().fold(0, |acc, ch| acc + ch.width().unwrap_or(1));\n\n" + }, + { + "chunk_id": "doc_70_chunk_76", + "original_index": 76, + "content": " Some(cursor_end_offset)\n } else {\n None\n };\n\n Self { text, cursor_byte_offset, cursor_end_offset }\n }\n}\n\n/// Pending renderer updates.\n///\n/// All renderer updates are cached to be applied just before rendering, to avoid platform-specific\n/// rendering issues.\n#[derive(Debug, Default, Copy, Clone)]\npub struct RendererUpdate {\n /// Should resize the window.\n resize: bool,\n\n /// Clear font caches.\n clear_font_cache: bool,\n}\n\n/// Struct for safe in-place replacement.\n///\n/// This struct allows easily replacing 
struct fields that provide `self -> Self` methods in-place,\n/// without having to deal with constantly unwrapping the underlying [`Option`].\nstruct Replaceable<T>(Option<T>);\n\n" + }, + { + "chunk_id": "doc_70_chunk_77", + "original_index": 77, + "content": "impl<T> Replaceable<T> {\n pub fn new(inner: T) -> Self {\n Self(Some(inner))\n }\n\n /// Replace the contents of the container.\n pub fn replace_with<F: FnOnce(T) -> T>(&mut self, f: F) {\n self.0 = self.0.take().map(f);\n }\n\n /// Get immutable access to the wrapped value.\n pub fn get(&self) -> &T {\n self.0.as_ref().unwrap()\n }\n\n /// Get mutable access to the wrapped value.\n pub fn get_mut(&mut self) -> &mut T {\n self.0.as_mut().unwrap()\n }\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_78", + "original_index": 78, + "content": "impl<T> Deref for Replaceable<T> {\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n self.get()\n }\n}\n\nimpl<T> DerefMut for Replaceable<T> {\n fn deref_mut(&mut self) -> &mut Self::Target {\n self.get_mut()\n }\n}\n\n/// The frame timer state.\npub struct FrameTimer {\n /// Base timestamp used to compute sync points.\n base: Instant,\n\n /// The last timestamp we synced to.\n last_synced_timestamp: Instant,\n\n /// The refresh rate we've used to compute sync timestamps.\n refresh_interval: Duration,\n}\n\nimpl FrameTimer {\n pub fn new() -> Self {\n let now = Instant::now();\n Self { base: now, last_synced_timestamp: now, refresh_interval: Duration::ZERO }\n }\n\n" + }, + { + "chunk_id": "doc_70_chunk_79", + "original_index": 79, + "content": " /// Compute the delay that we should use to achieve the target frame\n /// rate.\n pub fn compute_timeout(&mut self, refresh_interval: Duration) -> Duration {\n let now = Instant::now();\n\n // Handle refresh rate change.\n if self.refresh_interval != refresh_interval {\n self.base = now;\n self.last_synced_timestamp = now;\n self.refresh_interval = refresh_interval;\n return refresh_interval;\n }\n\n let next_frame = self.last_synced_timestamp + self.refresh_interval;\n\n" + }, + { + "chunk_id": "doc_70_chunk_80", + "original_index": 80, + "content": " if next_frame < now {\n // Redraw immediately if we haven't drawn in over `refresh_interval` microseconds.\n let elapsed_micros = (now - self.base).as_micros() as u64;\n let refresh_micros = self.refresh_interval.as_micros() as u64;\n self.last_synced_timestamp =\n now - Duration::from_micros(elapsed_micros % refresh_micros);\n Duration::ZERO\n } else {\n // Redraw on the next `refresh_interval` clock tick.\n self.last_synced_timestamp = next_frame;\n next_frame - now\n }\n }\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_81", + "original_index": 81, + "content": "/// Calculate the cell dimensions based on font metrics.\n///\n/// This will return a tuple of the cell width and height.\n#[inline]\nfn compute_cell_size(config: &UiConfig, metrics: &crossfont::Metrics) -> (f32, f32) {\n let offset_x = f64::from(config.font.offset.x);\n let offset_y = f64::from(config.font.offset.y);\n (\n (metrics.average_advance + offset_x).floor().max(1.) as f32,\n (metrics.line_height + offset_y).floor().max(1.) 
as f32,\n )\n}\n\n" + }, + { + "chunk_id": "doc_70_chunk_82", + "original_index": 82, + "content": "/// Calculate the size of the window given padding, terminal dimensions and cell size.\nfn window_size(\n config: &UiConfig,\n dimensions: Dimensions,\n cell_width: f32,\n cell_height: f32,\n scale_factor: f32,\n) -> PhysicalSize<u32> {\n let padding = config.window.padding(scale_factor);\n\n let grid_width = cell_width * dimensions.columns.max(MIN_COLUMNS) as f32;\n let grid_height = cell_height * dimensions.lines.max(MIN_SCREEN_LINES) as f32;\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n PhysicalSize::new(width as u32, height as u32)\n}\n" + } + ] + }, + { + "doc_id": "doc_71", + "original_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_71_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n" + }, + { + "chunk_id": "doc_71_chunk_1", + "original_index": 1, + "content": "package org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. 
If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_72", + "original_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", + "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\nfrom typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n \"\"\"\n Params for :class:`VectorSlicer`.\n \"\"\"\n\n INDICES: Param[Tuple[int, ...]] = IntArrayParam(\n \"indices\",\n \"An array of indices to select features from a vector column.\",\n None,\n indices_validator(None))\n\n def __init__(self, java_params):\n super(_VectorSlicerParams, self).__init__(java_params)\n\n def set_indices(self, *ind: int):\n return self.set(self.INDICES, ind)\n\n def get_indices(self) -> Tuple[int, ...]:\n return self.get(self.INDICES)\n\n @property\n def indices(self) -> Tuple[int, ...]:\n return self.get_indices()\n\n\nclass VectorSlicer(JavaFeatureTransformer, _VectorSlicerParams):\n \"\"\"\n A Transformer that transforms a vector to a new feature, which is a sub-array of\n the original feature.It is useful for extracting features from a given vector.\n\n Note that duplicate features are not allowed, so there can be no overlap between\n selected indices. 
If the max value of the indices is greater than the size of\n the input vector, it throws an IllegalArgumentException.\n \"\"\"\n\n def __init__(self, java_model=None):\n super(VectorSlicer, self).__init__(java_model)\n\n @classmethod\n def _java_transformer_package_name(cls) -> str:\n return \"vectorslicer\"\n\n @classmethod\n def _java_transformer_class_name(cls) -> str:\n return \"VectorSlicer\"\n", + "chunks": [ + { + "chunk_id": "doc_72_chunk_0", + "original_index": 0, + "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n" + }, + { + "chunk_id": "doc_72_chunk_1", + "original_index": 1, + "content": "from typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n" + }, + { + "chunk_id": "doc_72_chunk_2", + "original_index": 2, + "content": " \"\"\"\n Params for :class:`VectorSlicer`.\n \"\"\"\n\n INDICES: Param[Tuple[int, ...]] = IntArrayParam(\n \"indices\",\n \"An array of indices to select features from a vector column.\",\n None,\n indices_validator(None))\n\n def __init__(self, java_params):\n super(_VectorSlicerParams, self).__init__(java_params)\n\n def set_indices(self, *ind: int):\n return self.set(self.INDICES, ind)\n\n def get_indices(self) -> Tuple[int, ...]:\n return self.get(self.INDICES)\n\n @property\n def indices(self) -> Tuple[int, ...]:\n return self.get_indices()\n\n" + }, + { + "chunk_id": "doc_72_chunk_3", + "original_index": 3, + "content": "\nclass VectorSlicer(JavaFeatureTransformer, _VectorSlicerParams):\n \"\"\"\n A Transformer that transforms a vector to a new feature, which is a sub-array of\n the original feature.It is useful for extracting features from a given vector.\n\n Note that duplicate features are not allowed, so there can be no overlap between\n selected indices. 
If the max value of the indices is greater than the size of\n the input vector, it throws an IllegalArgumentException.\n \"\"\"\n\n def __init__(self, java_model=None):\n super(VectorSlicer, self).__init__(java_model)\n\n @classmethod\n def _java_transformer_package_name(cls) -> str:\n return \"vectorslicer\"\n\n @classmethod\n def _java_transformer_class_name(cls) -> str:\n return \"VectorSlicer\"\n" + } + ] + }, + { + "doc_id": "doc_73", + "original_uuid": "883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.iteration.operator.allround;\n\nimport org.apache.flink.annotation.Internal;\n\nimport java.util.function.Supplier;\n\n/**\n * Operators or UDF implements this method would be provided with an supplier that provides the\n * current rounds of the current element.\n */\n@Internal\npublic interface EpochAware {\n\n void setEpochSupplier(Supplier<Integer> epochSupplier);\n}\n", + "chunks": [ + { + "chunk_id": "doc_73_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n" + }, + { + "chunk_id": "doc_73_chunk_1", + "original_index": 1, + "content": "package org.apache.flink.iteration.operator.allround;\n\nimport org.apache.flink.annotation.Internal;\n\nimport java.util.function.Supplier;\n\n/**\n * Operators or UDF implements this method would be provided with an supplier that provides the\n * current rounds of the current element.\n */\n@Internal\npublic interface EpochAware {\n\n void setEpochSupplier(Supplier<Integer> epochSupplier);\n}\n" + } + ] + }, + { + "doc_id": "doc_74", + "original_uuid": "8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. 
The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. See the License for the\n * specific language governing permissions and limitations\n * under the License.\n */\n\npackage org.apache.flink.ml.linalg;\n\nimport org.apache.flink.api.common.typeinfo.TypeInfo;\nimport org.apache.flink.ml.linalg.typeinfo.VectorWithNormTypeInfoFactory;\n\n/** A vector with its norm. */\n@TypeInfo(VectorWithNormTypeInfoFactory.class)\npublic class VectorWithNorm {\n public final Vector vector;\n\n public final double l2Norm;\n\n public VectorWithNorm(Vector vector) {\n this(vector, BLAS.norm2(vector));\n }\n\n public VectorWithNorm(Vector vector, double l2Norm) {\n this.vector = vector;\n this.l2Norm = l2Norm;\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_74_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. See the License for the\n * specific language governing permissions and limitations\n * under the License.\n */\n\n" + }, + { + "chunk_id": "doc_74_chunk_1", + "original_index": 1, + "content": "package org.apache.flink.ml.linalg;\n\nimport org.apache.flink.api.common.typeinfo.TypeInfo;\nimport org.apache.flink.ml.linalg.typeinfo.VectorWithNormTypeInfoFactory;\n\n/** A vector with its norm. */\n@TypeInfo(VectorWithNormTypeInfoFactory.class)\npublic class VectorWithNorm {\n public final Vector vector;\n\n public final double l2Norm;\n\n public VectorWithNorm(Vector vector) {\n this(vector, BLAS.norm2(vector));\n }\n\n public VectorWithNorm(Vector vector, double l2Norm) {\n this.vector = vector;\n this.l2Norm = l2Norm;\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_75", + "original_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", + "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\nfrom typing import List\n\nfrom pyflink.common import Types\nfrom pyflink.ml.tests.test_utils import PyFlinkMLTestCase, update_existing_params\n\nfrom pyflink.ml.linalg import DenseVectorTypeInfo, Vectors\n\nfrom pyflink.ml.feature.univariatefeatureselector import UnivariateFeatureSelector, \\\n UnivariateFeatureSelectorModel\nfrom pyflink.table import Table\n\n\nclass UnivariateFeatureSelectorTest(PyFlinkMLTestCase):\n\n def setUp(self):\n super(UnivariateFeatureSelectorTest, self).setUp()\n self.input_table = self.t_env.from_data_stream(\n self.env.from_collection([\n (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,\n 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)),\n (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,\n 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)),\n (1, Vectors.dense(3.85692159e-01, -9.04639637e-01, 5.09782604e-02,\n 8.40043971e-01, 7.45977857e-01, 8.78402288e-01)),\n (1, Vectors.dense(1.36264353e+00, 2.62454094e-01, 7.96306202e-01,\n 6.14948000e-01, 7.44948187e-01, 9.74034830e-01)),\n (1, Vectors.dense(9.65874070e-01, 2.52773665e+00, -2.19380094e+00,\n 2.33408080e-01, 1.86340919e-01, 8.23390433e-01)),\n (2, Vectors.dense(1.12324305e+01, -2.77121515e-01, 1.12740513e-01,\n 2.35184013e-01, 3.46668895e-01, 9.38500782e-02)),\n (2, Vectors.dense(1.06195839e+01, -1.82891238e+00, 2.25085601e-01,\n 9.09979851e-01, 6.80257535e-02, 8.24017480e-01)),\n (2, Vectors.dense(1.12806837e+01, 1.30686889e+00, 9.32839108e-02,\n 3.49784755e-01, 1.71322408e-02, 7.48465194e-02)),\n (2, Vectors.dense(9.98689462e+00, 9.50808938e-01, -2.90786359e-01,\n 2.31253009e-01, 7.46270968e-01, 1.60308169e-01)),\n (2, Vectors.dense(1.08428551e+01, -1.02749936e+00, 1.73951508e-01,\n 8.92482744e-02, 1.42651730e-01, 7.66751625e-01)),\n (3, Vectors.dense(-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,\n 8.22809049e-01, 3.26739456e-01, 7.88268404e-01)),\n (3, Vectors.dense(-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,\n 7.33931213e-01, 1.42554396e-01, 7.11225605e-01)),\n (3, Vectors.dense(-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,\n 2.51532056e-01, 2.82729807e-01, 7.16245686e-01)),\n (3, Vectors.dense(-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,\n 5.93898886e-01, 5.68425656e-01, 8.49762330e-01)),\n (3, Vectors.dense(7.63485129e-01, 1.02605138e+01, 1.32617719e+00,\n 5.49682879e-01, 8.59931442e-01, 4.88677978e-02)),\n (4, Vectors.dense(9.34900015e-01, 4.11379043e-01, 8.65010205e+00,\n 9.23509168e-01, 1.16995043e-01, 5.91894106e-03)),\n (4, Vectors.dense(4.73734933e-01, -1.48321181e+00, 9.73349621e+00,\n 4.09421563e-01, 5.09375719e-01, 5.93157850e-01)),\n (4, Vectors.dense(3.41470679e-01, -6.88972582e-01, 9.60347938e+00,\n 3.62654055e-01, 2.43437468e-01, 7.13052838e-01)),\n (4, Vectors.dense(-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,\n 8.24123861e-01, 5.84074506e-01, 6.54461558e-01)),\n (4, Vectors.dense(-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,\n 5.16336729e-01, 9.99776159e-01, 3.15769738e-01)),\n ],\n type_info=Types.ROW_NAMED(\n ['label', 
'features'],\n [Types.INT(), DenseVectorTypeInfo()])\n ))\n\n def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n univariate_feature_selector\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\\\n .set_selection_mode('fpr')\\\n .set_selection_threshold(0.01)\n self.assertEqual('test_features', univariate_feature_selector.features_col)\n self.assertEqual('test_label', univariate_feature_selector.label_col)\n self.assertEqual('test_output', univariate_feature_selector.output_col)\n self.assertEqual('continuous', univariate_feature_selector.feature_type)\n self.assertEqual('categorical', univariate_feature_selector.label_type)\n self.assertEqual('fpr', univariate_feature_selector.selection_mode)\n self.assertEqual(0.01, univariate_feature_selector.selection_threshold)\n\n def test_output_schema(self):\n selector = UnivariateFeatureSelector()\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\n temp_table = self.input_table.alias('test_label', 'test_features')\n model = selector.fit(temp_table)\n output = model.transform(temp_table)[0]\n self.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n\n def test_fit_and_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n output = model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_get_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n model_data = model.get_model_data()[0]\n self.assertEqual(['indices'], model_data.get_schema().get_field_names())\n\n model_rows = [result for result in\n self.t_env.to_data_stream(model_data).execute_and_collect()]\n self.assertEqual(1, len(model_rows))\n self.assertListEqual([0, 2, 1], model_rows[0][0])\n\n def test_set_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model_a = selector.fit(self.input_table)\n model_data = model_a.get_model_data()[0]\n\n model_b = UnivariateFeatureSelectorModel() \\\n .set_model_data(model_data)\n update_existing_params(model_b, model_a)\n\n output = 
model_b.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_save_load_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n reloaded_selector = self.save_and_reload(selector)\n model = reloaded_selector.fit(self.input_table)\n reloaded_model = self.save_and_reload(model)\n output = reloaded_model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def verify_output_result(\n self, output: Table,\n field_names: List[str],\n feature_col: str,\n output_col: str,\n indices: List[int]):\n collected_results = [result for result in\n self.t_env.to_data_stream(output).execute_and_collect()]\n for item in collected_results:\n item.set_field_names(field_names)\n self.assertEqual(len(indices), item[output_col].size())\n for i in range(0, len(indices)):\n self.assertEqual(item[feature_col].get(indices[i]),\n item[output_col].get(i))\n", + "chunks": [ + { + "chunk_id": "doc_75_chunk_0", + "original_index": 0, + "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\nfrom typing import List\n\n" + }, + { + "chunk_id": "doc_75_chunk_1", + "original_index": 1, + "content": "from pyflink.common import Types\nfrom pyflink.ml.tests.test_utils import PyFlinkMLTestCase, update_existing_params\n\nfrom pyflink.ml.linalg import DenseVectorTypeInfo, Vectors\n\nfrom pyflink.ml.feature.univariatefeatureselector import UnivariateFeatureSelector, \\\n UnivariateFeatureSelectorModel\nfrom pyflink.table import Table\n\n\nclass UnivariateFeatureSelectorTest(PyFlinkMLTestCase):\n\n def setUp(self):\n super(UnivariateFeatureSelectorTest, self).setUp()\n self.input_table = self.t_env.from_data_stream(\n self.env.from_collection([\n (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,\n 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)),\n (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,\n 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)),\n" + }, + { + "chunk_id": "doc_75_chunk_2", + "original_index": 2, + "content": " (1, Vectors.dense(3.85692159e-01, -9.04639637e-01, 5.09782604e-02,\n 8.40043971e-01, 7.45977857e-01, 8.78402288e-01)),\n (1, Vectors.dense(1.36264353e+00, 2.62454094e-01, 7.96306202e-01,\n 6.14948000e-01, 7.44948187e-01, 9.74034830e-01)),\n (1, Vectors.dense(9.65874070e-01, 2.52773665e+00, 
-2.19380094e+00,\n 2.33408080e-01, 1.86340919e-01, 8.23390433e-01)),\n" + }, + { + "chunk_id": "doc_75_chunk_3", + "original_index": 3, + "content": " (2, Vectors.dense(1.12324305e+01, -2.77121515e-01, 1.12740513e-01,\n 2.35184013e-01, 3.46668895e-01, 9.38500782e-02)),\n (2, Vectors.dense(1.06195839e+01, -1.82891238e+00, 2.25085601e-01,\n 9.09979851e-01, 6.80257535e-02, 8.24017480e-01)),\n (2, Vectors.dense(1.12806837e+01, 1.30686889e+00, 9.32839108e-02,\n 3.49784755e-01, 1.71322408e-02, 7.48465194e-02)),\n (2, Vectors.dense(9.98689462e+00, 9.50808938e-01, -2.90786359e-01,\n 2.31253009e-01, 7.46270968e-01, 1.60308169e-01)),\n" + }, + { + "chunk_id": "doc_75_chunk_4", + "original_index": 4, + "content": " (2, Vectors.dense(1.08428551e+01, -1.02749936e+00, 1.73951508e-01,\n 8.92482744e-02, 1.42651730e-01, 7.66751625e-01)),\n (3, Vectors.dense(-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,\n 8.22809049e-01, 3.26739456e-01, 7.88268404e-01)),\n (3, Vectors.dense(-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,\n 7.33931213e-01, 1.42554396e-01, 7.11225605e-01)),\n (3, Vectors.dense(-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,\n 2.51532056e-01, 2.82729807e-01, 7.16245686e-01)),\n" + }, + { + "chunk_id": "doc_75_chunk_5", + "original_index": 5, + "content": " (3, Vectors.dense(-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,\n 5.93898886e-01, 5.68425656e-01, 8.49762330e-01)),\n (3, Vectors.dense(7.63485129e-01, 1.02605138e+01, 1.32617719e+00,\n 5.49682879e-01, 8.59931442e-01, 4.88677978e-02)),\n (4, Vectors.dense(9.34900015e-01, 4.11379043e-01, 8.65010205e+00,\n 9.23509168e-01, 1.16995043e-01, 5.91894106e-03)),\n (4, Vectors.dense(4.73734933e-01, -1.48321181e+00, 9.73349621e+00,\n 4.09421563e-01, 5.09375719e-01, 5.93157850e-01)),\n (4, Vectors.dense(3.41470679e-01, -6.88972582e-01, 9.60347938e+00,\n 3.62654055e-01, 2.43437468e-01, 7.13052838e-01)),\n" + }, + { + "chunk_id": "doc_75_chunk_6", + "original_index": 6, + "content": " (4, Vectors.dense(-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,\n 8.24123861e-01, 5.84074506e-01, 6.54461558e-01)),\n (4, Vectors.dense(-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,\n 5.16336729e-01, 9.99776159e-01, 3.15769738e-01)),\n ],\n type_info=Types.ROW_NAMED(\n ['label', 'features'],\n [Types.INT(), DenseVectorTypeInfo()])\n ))\n\n" + }, + { + "chunk_id": "doc_75_chunk_7", + "original_index": 7, + "content": " def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n" + }, + { + "chunk_id": "doc_75_chunk_8", + "original_index": 8, + "content": " univariate_feature_selector\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\\\n .set_selection_mode('fpr')\\\n 
.set_selection_threshold(0.01)\n self.assertEqual('test_features', univariate_feature_selector.features_col)\n self.assertEqual('test_label', univariate_feature_selector.label_col)\n self.assertEqual('test_output', univariate_feature_selector.output_col)\n self.assertEqual('continuous', univariate_feature_selector.feature_type)\n self.assertEqual('categorical', univariate_feature_selector.label_type)\n self.assertEqual('fpr', univariate_feature_selector.selection_mode)\n self.assertEqual(0.01, univariate_feature_selector.selection_threshold)\n\n" + }, + { + "chunk_id": "doc_75_chunk_9", + "original_index": 9, + "content": " def test_output_schema(self):\n selector = UnivariateFeatureSelector()\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\n temp_table = self.input_table.alias('test_label', 'test_features')\n model = selector.fit(temp_table)\n output = model.transform(temp_table)[0]\n self.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n\n" + }, + { + "chunk_id": "doc_75_chunk_10", + "original_index": 10, + "content": " def test_fit_and_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n output = model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_get_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n model_data = model.get_model_data()[0]\n self.assertEqual(['indices'], model_data.get_schema().get_field_names())\n\n" + }, + { + "chunk_id": "doc_75_chunk_11", + "original_index": 11, + "content": " model_rows = [result for result in\n self.t_env.to_data_stream(model_data).execute_and_collect()]\n self.assertEqual(1, len(model_rows))\n self.assertListEqual([0, 2, 1], model_rows[0][0])\n\n def test_set_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model_a = selector.fit(self.input_table)\n model_data = model_a.get_model_data()[0]\n\n model_b = UnivariateFeatureSelectorModel() \\\n .set_model_data(model_data)\n update_existing_params(model_b, model_a)\n\n output = model_b.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n" + }, + { + "chunk_id": "doc_75_chunk_12", + "original_index": 12, + "content": " def test_save_load_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n reloaded_selector = self.save_and_reload(selector)\n model = reloaded_selector.fit(self.input_table)\n reloaded_model = self.save_and_reload(model)\n output = reloaded_model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n" + }, + { + "chunk_id": "doc_75_chunk_13", + "original_index": 13, 
+ "content": " def verify_output_result(\n self, output: Table,\n field_names: List[str],\n feature_col: str,\n output_col: str,\n indices: List[int]):\n collected_results = [result for result in\n self.t_env.to_data_stream(output).execute_and_collect()]\n for item in collected_results:\n item.set_field_names(field_names)\n self.assertEqual(len(indices), item[output_col].size())\n for i in range(0, len(indices)):\n self.assertEqual(item[feature_col].get(indices[i]),\n item[output_col].get(i))\n" + } + ] + }, + { + "doc_id": "doc_76", + "original_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.regression;\n\nimport org.apache.flink.api.common.typeinfo.TypeInformation;\nimport org.apache.flink.api.common.typeinfo.Types;\nimport org.apache.flink.api.java.typeutils.RowTypeInfo;\nimport org.apache.flink.ml.linalg.SparseVector;\nimport org.apache.flink.ml.linalg.Vectors;\nimport org.apache.flink.ml.linalg.typeinfo.DenseVectorTypeInfo;\nimport org.apache.flink.ml.regression.linearregression.LinearRegression;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModel;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModelData;\nimport org.apache.flink.ml.util.ParamUtils;\nimport org.apache.flink.ml.util.TestUtils;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.test.util.AbstractTestBase;\nimport org.apache.flink.types.Row;\n\nimport org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.lang3.RandomUtils;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\n\n/** Tests {@link LinearRegression} and {@link LinearRegressionModel}. 
*/\npublic class LinearRegressionTest extends AbstractTestBase {\n\n @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();\n\n private StreamExecutionEnvironment env;\n\n private StreamTableEnvironment tEnv;\n\n private static final List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(2, 4), 10.0, 1.0),\n Row.of(Vectors.dense(2, 2), 6.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(1, 2), 5.0, 1.0),\n Row.of(Vectors.dense(5, 3), 11.0, 1.0));\n\n private static final double[] expectedCoefficient = new double[] {1.141, 1.829};\n\n private static final double TOLERANCE = 1e-7;\n\n private static final double PREDICTION_TOLERANCE = 0.1;\n\n private static final double COEFFICIENT_TOLERANCE = 0.1;\n\n private Table trainDataTable;\n\n @Before\n public void before() {\n env = TestUtils.getExecutionEnvironment();\n tEnv = StreamTableEnvironment.create(env);\n Collections.shuffle(trainData);\n trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n }\n\n @SuppressWarnings(\"unchecked\")\n private void verifyPredictionResult(Table output, String labelCol, String predictionCol)\n throws Exception {\n List predResult = IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect());\n for (Row predictionRow : predResult) {\n double label = ((Number) predictionRow.getField(labelCol)).doubleValue();\n double prediction = (double) predictionRow.getField(predictionCol);\n assertTrue(Math.abs(prediction - label) / label < PREDICTION_TOLERANCE);\n }\n }\n\n @Test\n public void testParam() {\n LinearRegression linearRegression = new LinearRegression();\n assertEquals(\"features\", linearRegression.getFeaturesCol());\n assertEquals(\"label\", linearRegression.getLabelCol());\n assertNull(linearRegression.getWeightCol());\n assertEquals(20, linearRegression.getMaxIter());\n assertEquals(1e-6, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.1, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(32, linearRegression.getGlobalBatchSize());\n assertEquals(0, linearRegression.getReg(), TOLERANCE);\n assertEquals(0, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"prediction\", linearRegression.getPredictionCol());\n\n linearRegression\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setMaxIter(1000)\n .setTol(0.001)\n .setLearningRate(0.5)\n .setGlobalBatchSize(1000)\n .setReg(0.1)\n .setElasticNet(0.5)\n .setPredictionCol(\"test_predictionCol\");\n assertEquals(\"test_features\", linearRegression.getFeaturesCol());\n assertEquals(\"test_label\", linearRegression.getLabelCol());\n assertEquals(\"test_weight\", linearRegression.getWeightCol());\n assertEquals(1000, linearRegression.getMaxIter());\n assertEquals(0.001, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.5, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(1000, linearRegression.getGlobalBatchSize());\n assertEquals(0.1, linearRegression.getReg(), TOLERANCE);\n assertEquals(0.5, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"test_predictionCol\", linearRegression.getPredictionCol());\n }\n\n @Test\n public void testOutputSchema() {\n Table tempTable = 
trainDataTable.as(\"test_features\", \"test_label\", \"test_weight\");\n LinearRegression linearRegression =\n new LinearRegression()\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setPredictionCol(\"test_predictionCol\");\n Table output = linearRegression.fit(trainDataTable).transform(tempTable)[0];\n assertEquals(\n Arrays.asList(\"test_features\", \"test_label\", \"test_weight\", \"test_predictionCol\"),\n output.getResolvedSchema().getColumnNames());\n }\n\n @Test\n public void testFitAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testInputTypeConversion() throws Exception {\n trainDataTable = TestUtils.convertDataTypesToSparseInt(tEnv, trainDataTable);\n assertArrayEquals(\n new Class[] {SparseVector.class, Integer.class, Integer.class},\n TestUtils.getColumnDataTypes(trainDataTable));\n\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testSaveLoadAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n linearRegression =\n TestUtils.saveAndReload(\n tEnv,\n linearRegression,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegression::load);\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n model =\n TestUtils.saveAndReload(\n tEnv,\n model,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegressionModel::load);\n assertEquals(\n Collections.singletonList(\"coefficient\"),\n model.getModelData()[0].getResolvedSchema().getColumnNames());\n Table output = model.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testGetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n assertNotNull(modelData);\n assertEquals(1, modelData.size());\n assertArrayEquals(\n expectedCoefficient, modelData.get(0).coefficient.values, COEFFICIENT_TOLERANCE);\n }\n\n @Test\n public void testSetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n\n LinearRegressionModel newModel = new LinearRegressionModel();\n ParamUtils.updateExistingParams(newModel, model.getParamMap());\n newModel.setModelData(model.getModelData());\n Table output = newModel.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testMoreSubtaskThanData() throws Exception {\n List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0));\n\n Table trainDataTable =\n 
tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n\n LinearRegression linearRegression =\n new LinearRegression().setWeightCol(\"weight\").setGlobalBatchSize(128);\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testRegularization() throws Exception {\n checkRegularization(0, RandomUtils.nextDouble(0, 1), expectedCoefficient);\n checkRegularization(0.1, 0, new double[] {1.165, 1.780});\n checkRegularization(0.1, 1, new double[] {1.143, 1.812});\n checkRegularization(0.1, 0.5, new double[] {1.154, 1.796});\n }\n\n @SuppressWarnings(\"unchecked\")\n private void checkRegularization(double reg, double elasticNet, double[] expectedCoefficient)\n throws Exception {\n LinearRegressionModel model =\n new LinearRegression()\n .setWeightCol(\"weight\")\n .setReg(reg)\n .setElasticNet(elasticNet)\n .fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n final double errorTol = 1e-3;\n assertArrayEquals(expectedCoefficient, modelData.get(0).coefficient.values, errorTol);\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_76_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.regression;\n\n" + }, + { + "chunk_id": "doc_76_chunk_1", + "original_index": 1, + "content": "import org.apache.flink.api.common.typeinfo.TypeInformation;\nimport org.apache.flink.api.common.typeinfo.Types;\nimport org.apache.flink.api.java.typeutils.RowTypeInfo;\nimport org.apache.flink.ml.linalg.SparseVector;\nimport org.apache.flink.ml.linalg.Vectors;\nimport org.apache.flink.ml.linalg.typeinfo.DenseVectorTypeInfo;\nimport org.apache.flink.ml.regression.linearregression.LinearRegression;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModel;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModelData;\nimport org.apache.flink.ml.util.ParamUtils;\nimport org.apache.flink.ml.util.TestUtils;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.test.util.AbstractTestBase;\nimport org.apache.flink.types.Row;\n\n" + }, + { + "chunk_id": "doc_76_chunk_2", + "original_index": 2, + "content": "import org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.lang3.RandomUtils;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\n\n/** Tests {@link LinearRegression} and {@link LinearRegressionModel}. 
*/\npublic class LinearRegressionTest extends AbstractTestBase {\n\n @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();\n\n private StreamExecutionEnvironment env;\n\n private StreamTableEnvironment tEnv;\n\n" + }, + { + "chunk_id": "doc_76_chunk_3", + "original_index": 3, + "content": " private static final List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(2, 4), 10.0, 1.0),\n Row.of(Vectors.dense(2, 2), 6.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(1, 2), 5.0, 1.0),\n Row.of(Vectors.dense(5, 3), 11.0, 1.0));\n\n private static final double[] expectedCoefficient = new double[] {1.141, 1.829};\n\n private static final double TOLERANCE = 1e-7;\n\n private static final double PREDICTION_TOLERANCE = 0.1;\n\n private static final double COEFFICIENT_TOLERANCE = 0.1;\n\n private Table trainDataTable;\n\n" + }, + { + "chunk_id": "doc_76_chunk_4", + "original_index": 4, + "content": " @Before\n public void before() {\n env = TestUtils.getExecutionEnvironment();\n tEnv = StreamTableEnvironment.create(env);\n Collections.shuffle(trainData);\n trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n }\n\n" + }, + { + "chunk_id": "doc_76_chunk_5", + "original_index": 5, + "content": " @SuppressWarnings(\"unchecked\")\n private void verifyPredictionResult(Table output, String labelCol, String predictionCol)\n throws Exception {\n List predResult = IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect());\n for (Row predictionRow : predResult) {\n double label = ((Number) predictionRow.getField(labelCol)).doubleValue();\n double prediction = (double) predictionRow.getField(predictionCol);\n assertTrue(Math.abs(prediction - label) / label < PREDICTION_TOLERANCE);\n }\n }\n\n" + }, + { + "chunk_id": "doc_76_chunk_6", + "original_index": 6, + "content": " @Test\n public void testParam() {\n LinearRegression linearRegression = new LinearRegression();\n assertEquals(\"features\", linearRegression.getFeaturesCol());\n assertEquals(\"label\", linearRegression.getLabelCol());\n assertNull(linearRegression.getWeightCol());\n assertEquals(20, linearRegression.getMaxIter());\n assertEquals(1e-6, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.1, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(32, linearRegression.getGlobalBatchSize());\n assertEquals(0, linearRegression.getReg(), TOLERANCE);\n assertEquals(0, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"prediction\", linearRegression.getPredictionCol());\n\n" + }, + { + "chunk_id": "doc_76_chunk_7", + "original_index": 7, + "content": " linearRegression\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setMaxIter(1000)\n .setTol(0.001)\n .setLearningRate(0.5)\n .setGlobalBatchSize(1000)\n .setReg(0.1)\n .setElasticNet(0.5)\n .setPredictionCol(\"test_predictionCol\");\n assertEquals(\"test_features\", linearRegression.getFeaturesCol());\n assertEquals(\"test_label\", linearRegression.getLabelCol());\n assertEquals(\"test_weight\", linearRegression.getWeightCol());\n" + }, + { + "chunk_id": "doc_76_chunk_8", + "original_index": 8, + "content": " assertEquals(1000, linearRegression.getMaxIter());\n 
assertEquals(0.001, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.5, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(1000, linearRegression.getGlobalBatchSize());\n assertEquals(0.1, linearRegression.getReg(), TOLERANCE);\n assertEquals(0.5, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"test_predictionCol\", linearRegression.getPredictionCol());\n }\n\n" + }, + { + "chunk_id": "doc_76_chunk_9", + "original_index": 9, + "content": " @Test\n public void testOutputSchema() {\n Table tempTable = trainDataTable.as(\"test_features\", \"test_label\", \"test_weight\");\n LinearRegression linearRegression =\n new LinearRegression()\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setPredictionCol(\"test_predictionCol\");\n Table output = linearRegression.fit(trainDataTable).transform(tempTable)[0];\n assertEquals(\n Arrays.asList(\"test_features\", \"test_label\", \"test_weight\", \"test_predictionCol\"),\n output.getResolvedSchema().getColumnNames());\n }\n\n" + }, + { + "chunk_id": "doc_76_chunk_10", + "original_index": 10, + "content": " @Test\n public void testFitAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n" + }, + { + "chunk_id": "doc_76_chunk_11", + "original_index": 11, + "content": " @Test\n public void testInputTypeConversion() throws Exception {\n trainDataTable = TestUtils.convertDataTypesToSparseInt(tEnv, trainDataTable);\n assertArrayEquals(\n new Class[] {SparseVector.class, Integer.class, Integer.class},\n TestUtils.getColumnDataTypes(trainDataTable));\n\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n" + }, + { + "chunk_id": "doc_76_chunk_12", + "original_index": 12, + "content": " @Test\n public void testSaveLoadAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n linearRegression =\n TestUtils.saveAndReload(\n tEnv,\n linearRegression,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegression::load);\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n" + }, + { + "chunk_id": "doc_76_chunk_13", + "original_index": 13, + "content": " model =\n TestUtils.saveAndReload(\n tEnv,\n model,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegressionModel::load);\n assertEquals(\n Collections.singletonList(\"coefficient\"),\n model.getModelData()[0].getResolvedSchema().getColumnNames());\n Table output = model.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n" + }, + { + "chunk_id": "doc_76_chunk_14", + "original_index": 14, + "content": " @Test\n public void testGetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n 
assertNotNull(modelData);\n assertEquals(1, modelData.size());\n assertArrayEquals(\n expectedCoefficient, modelData.get(0).coefficient.values, COEFFICIENT_TOLERANCE);\n }\n\n" + }, + { + "chunk_id": "doc_76_chunk_15", + "original_index": 15, + "content": " @Test\n public void testSetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n\n LinearRegressionModel newModel = new LinearRegressionModel();\n ParamUtils.updateExistingParams(newModel, model.getParamMap());\n newModel.setModelData(model.getModelData());\n Table output = newModel.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n" + }, + { + "chunk_id": "doc_76_chunk_16", + "original_index": 16, + "content": " @Test\n public void testMoreSubtaskThanData() throws Exception {\n List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0));\n\n Table trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n\n" + }, + { + "chunk_id": "doc_76_chunk_17", + "original_index": 17, + "content": " LinearRegression linearRegression =\n new LinearRegression().setWeightCol(\"weight\").setGlobalBatchSize(128);\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testRegularization() throws Exception {\n checkRegularization(0, RandomUtils.nextDouble(0, 1), expectedCoefficient);\n checkRegularization(0.1, 0, new double[] {1.165, 1.780});\n checkRegularization(0.1, 1, new double[] {1.143, 1.812});\n checkRegularization(0.1, 0.5, new double[] {1.154, 1.796});\n }\n\n" + }, + { + "chunk_id": "doc_76_chunk_18", + "original_index": 18, + "content": " @SuppressWarnings(\"unchecked\")\n private void checkRegularization(double reg, double elasticNet, double[] expectedCoefficient)\n throws Exception {\n LinearRegressionModel model =\n new LinearRegression()\n .setWeightCol(\"weight\")\n .setReg(reg)\n .setElasticNet(elasticNet)\n .fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n final double errorTol = 1e-3;\n assertArrayEquals(expectedCoefficient, modelData.get(0).coefficient.values, errorTol);\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_77", + "original_uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.iteration.datacache.nonkeyed;\n\nimport org.apache.flink.annotation.Internal;\nimport org.apache.flink.api.common.typeutils.TypeSerializer;\nimport org.apache.flink.core.fs.Path;\nimport org.apache.flink.core.memory.DataOutputView;\nimport org.apache.flink.core.memory.DataOutputViewStreamWrapper;\nimport org.apache.flink.core.memory.MemorySegment;\nimport org.apache.flink.runtime.memory.MemoryAllocationException;\nimport org.apache.flink.table.runtime.util.MemorySegmentPool;\n\nimport javax.annotation.Nullable;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Optional;\n\n/** A class that writes cache data to memory segments. */\n@Internal\nclass MemorySegmentWriter implements SegmentWriter {\n\n /** The tool to serialize received records into bytes. */\n private final TypeSerializer serializer;\n\n /** The pre-allocated path to hold cached records into the file system. */\n private final Path path;\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The output stream to write serialized content to memory segments. */\n private final ManagedMemoryOutputStream outputStream;\n\n /** The wrapper view of the output stream to be used with TypeSerializer API. */\n private final DataOutputView outputView;\n\n /** The number of records added so far. */\n private int count;\n\n MemorySegmentWriter(\n TypeSerializer serializer,\n Path path,\n MemorySegmentPool segmentPool,\n long expectedSize)\n throws MemoryAllocationException {\n this.serializer = serializer;\n this.path = path;\n this.segmentPool = segmentPool;\n this.outputStream = new ManagedMemoryOutputStream(segmentPool, expectedSize);\n this.outputView = new DataOutputViewStreamWrapper(outputStream);\n this.count = 0;\n }\n\n @Override\n public boolean addRecord(T record) throws IOException {\n if (outputStream.getPos() >= DataCacheWriter.MAX_SEGMENT_SIZE) {\n return false;\n }\n try {\n serializer.serialize(record, outputView);\n count++;\n return true;\n } catch (RuntimeException e) {\n if (e.getCause() instanceof MemoryAllocationException) {\n return false;\n }\n throw e;\n }\n }\n\n @Override\n public Optional finish() throws IOException {\n if (count > 0) {\n return Optional.of(new Segment(path, count, outputStream.getSegments()));\n } else {\n segmentPool.returnAll(outputStream.getSegments());\n return Optional.empty();\n }\n }\n\n /** An output stream subclass that accepts bytes and writes them to memory segments. */\n private static class ManagedMemoryOutputStream extends OutputStream {\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The number of bytes in a memory segment. */\n private final int pageSize;\n\n /** The memory segments containing written bytes. */\n private final List segments = new ArrayList<>();\n\n /** The index of the segment that currently accepts written bytes. */\n private int segmentIndex;\n\n /** The number of bytes in the current segment that have been written. 
*/\n private int segmentOffset;\n\n /** The number of bytes that have been written so far. */\n private long globalOffset;\n\n /** The number of bytes that have been allocated so far. */\n private long allocatedBytes;\n\n public ManagedMemoryOutputStream(MemorySegmentPool segmentPool, long expectedSize)\n throws MemoryAllocationException {\n this.segmentPool = segmentPool;\n this.pageSize = segmentPool.pageSize();\n ensureCapacity(Math.max(expectedSize, 1L));\n }\n\n public long getPos() {\n return globalOffset;\n }\n\n public List getSegments() {\n return segments;\n }\n\n @Override\n public void write(int b) throws IOException {\n write(new byte[] {(byte) b}, 0, 1);\n }\n\n @Override\n public void write(@Nullable byte[] b, int off, int len) throws IOException {\n try {\n ensureCapacity(globalOffset + len);\n } catch (MemoryAllocationException e) {\n throw new RuntimeException(e);\n }\n\n while (len > 0) {\n int currentLen = Math.min(len, pageSize - segmentOffset);\n segments.get(segmentIndex).put(segmentOffset, b, off, currentLen);\n segmentOffset += currentLen;\n globalOffset += currentLen;\n if (segmentOffset >= pageSize) {\n segmentIndex++;\n segmentOffset = 0;\n }\n off += currentLen;\n len -= currentLen;\n }\n }\n\n private void ensureCapacity(long capacity) throws MemoryAllocationException {\n if (allocatedBytes >= capacity) {\n return;\n }\n\n int required =\n (int) (capacity % pageSize == 0 ? capacity / pageSize : capacity / pageSize + 1)\n - segments.size();\n\n List allocatedSegments = new ArrayList<>();\n for (int i = 0; i < required; i++) {\n MemorySegment memorySegment = segmentPool.nextSegment();\n if (memorySegment == null) {\n segmentPool.returnAll(allocatedSegments);\n throw new MemoryAllocationException();\n }\n allocatedSegments.add(memorySegment);\n }\n\n segments.addAll(allocatedSegments);\n allocatedBytes += (long) allocatedSegments.size() * pageSize;\n }\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_77_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n" + }, + { + "chunk_id": "doc_77_chunk_1", + "original_index": 1, + "content": "package org.apache.flink.iteration.datacache.nonkeyed;\n\nimport org.apache.flink.annotation.Internal;\nimport org.apache.flink.api.common.typeutils.TypeSerializer;\nimport org.apache.flink.core.fs.Path;\nimport org.apache.flink.core.memory.DataOutputView;\nimport org.apache.flink.core.memory.DataOutputViewStreamWrapper;\nimport org.apache.flink.core.memory.MemorySegment;\nimport org.apache.flink.runtime.memory.MemoryAllocationException;\nimport org.apache.flink.table.runtime.util.MemorySegmentPool;\n\nimport javax.annotation.Nullable;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Optional;\n\n/** A class that writes cache data to memory segments. */\n@Internal\nclass MemorySegmentWriter implements SegmentWriter {\n\n /** The tool to serialize received records into bytes. */\n private final TypeSerializer serializer;\n\n" + }, + { + "chunk_id": "doc_77_chunk_2", + "original_index": 2, + "content": " /** The pre-allocated path to hold cached records into the file system. */\n private final Path path;\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The output stream to write serialized content to memory segments. */\n private final ManagedMemoryOutputStream outputStream;\n\n /** The wrapper view of the output stream to be used with TypeSerializer API. */\n private final DataOutputView outputView;\n\n /** The number of records added so far. */\n private int count;\n\n" + }, + { + "chunk_id": "doc_77_chunk_3", + "original_index": 3, + "content": " MemorySegmentWriter(\n TypeSerializer serializer,\n Path path,\n MemorySegmentPool segmentPool,\n long expectedSize)\n throws MemoryAllocationException {\n this.serializer = serializer;\n this.path = path;\n this.segmentPool = segmentPool;\n this.outputStream = new ManagedMemoryOutputStream(segmentPool, expectedSize);\n this.outputView = new DataOutputViewStreamWrapper(outputStream);\n this.count = 0;\n }\n\n @Override\n public boolean addRecord(T record) throws IOException {\n if (outputStream.getPos() >= DataCacheWriter.MAX_SEGMENT_SIZE) {\n return false;\n }\n try {\n serializer.serialize(record, outputView);\n count++;\n return true;\n } catch (RuntimeException e) {\n if (e.getCause() instanceof MemoryAllocationException) {\n return false;\n }\n throw e;\n }\n }\n\n" + }, + { + "chunk_id": "doc_77_chunk_4", + "original_index": 4, + "content": " @Override\n public Optional finish() throws IOException {\n if (count > 0) {\n return Optional.of(new Segment(path, count, outputStream.getSegments()));\n } else {\n segmentPool.returnAll(outputStream.getSegments());\n return Optional.empty();\n }\n }\n\n /** An output stream subclass that accepts bytes and writes them to memory segments. */\n private static class ManagedMemoryOutputStream extends OutputStream {\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The number of bytes in a memory segment. 
*/\n private final int pageSize;\n\n /** The memory segments containing written bytes. */\n private final List segments = new ArrayList<>();\n\n" + }, + { + "chunk_id": "doc_77_chunk_5", + "original_index": 5, + "content": " /** The index of the segment that currently accepts written bytes. */\n private int segmentIndex;\n\n /** The number of bytes in the current segment that have been written. */\n private int segmentOffset;\n\n /** The number of bytes that have been written so far. */\n private long globalOffset;\n\n /** The number of bytes that have been allocated so far. */\n private long allocatedBytes;\n\n public ManagedMemoryOutputStream(MemorySegmentPool segmentPool, long expectedSize)\n throws MemoryAllocationException {\n this.segmentPool = segmentPool;\n this.pageSize = segmentPool.pageSize();\n ensureCapacity(Math.max(expectedSize, 1L));\n }\n\n" + }, + { + "chunk_id": "doc_77_chunk_6", + "original_index": 6, + "content": " public long getPos() {\n return globalOffset;\n }\n\n public List getSegments() {\n return segments;\n }\n\n @Override\n public void write(int b) throws IOException {\n write(new byte[] {(byte) b}, 0, 1);\n }\n\n @Override\n public void write(@Nullable byte[] b, int off, int len) throws IOException {\n try {\n ensureCapacity(globalOffset + len);\n } catch (MemoryAllocationException e) {\n throw new RuntimeException(e);\n }\n\n" + }, + { + "chunk_id": "doc_77_chunk_7", + "original_index": 7, + "content": " while (len > 0) {\n int currentLen = Math.min(len, pageSize - segmentOffset);\n segments.get(segmentIndex).put(segmentOffset, b, off, currentLen);\n segmentOffset += currentLen;\n globalOffset += currentLen;\n if (segmentOffset >= pageSize) {\n segmentIndex++;\n segmentOffset = 0;\n }\n off += currentLen;\n len -= currentLen;\n }\n }\n\n private void ensureCapacity(long capacity) throws MemoryAllocationException {\n if (allocatedBytes >= capacity) {\n return;\n }\n\n int required =\n (int) (capacity % pageSize == 0 ? capacity / pageSize : capacity / pageSize + 1)\n - segments.size();\n\n" + }, + { + "chunk_id": "doc_77_chunk_8", + "original_index": 8, + "content": " List allocatedSegments = new ArrayList<>();\n for (int i = 0; i < required; i++) {\n MemorySegment memorySegment = segmentPool.nextSegment();\n if (memorySegment == null) {\n segmentPool.returnAll(allocatedSegments);\n throw new MemoryAllocationException();\n }\n allocatedSegments.add(memorySegment);\n }\n\n segments.addAll(allocatedSegments);\n allocatedBytes += (long) allocatedSegments.size() * pageSize;\n }\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_78", + "original_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.iteration.operator.coordinator;\n\nimport org.apache.flink.iteration.IterationID;\nimport org.apache.flink.iteration.operator.event.GloballyAlignedEvent;\nimport org.apache.flink.iteration.operator.event.SubtaskAlignedEvent;\nimport org.apache.flink.runtime.jobgraph.OperatorID;\nimport org.apache.flink.runtime.operators.coordination.EventReceivingTasks;\nimport org.apache.flink.runtime.operators.coordination.MockOperatorCoordinatorContext;\nimport org.apache.flink.runtime.operators.coordination.OperatorEvent;\nimport org.apache.flink.util.TestLogger;\n\nimport org.junit.Test;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.function.BiFunction;\n\nimport static org.junit.Assert.assertEquals;\n\n/** Tests the behavior of {@link HeadOperatorCoordinator}. */\npublic class HeadOperatorCoordinatorTest extends TestLogger {\n\n @Test(timeout = 60000L)\n public void testForwardEvents() throws Exception {\n IterationID iterationId = new IterationID();\n List operatorIds = Arrays.asList(new OperatorID(), new OperatorID());\n List parallelisms = Arrays.asList(2, 3);\n List receivingTasks =\n Arrays.asList(\n EventReceivingTasks.createForRunningTasks(),\n EventReceivingTasks.createForRunningTasks());\n List coordinators = new ArrayList<>();\n\n int totalParallelism = parallelisms.stream().mapToInt(i -> i).sum();\n\n for (int i = 0; i < operatorIds.size(); ++i) {\n HeadOperatorCoordinator coordinator =\n createCoordinator(iterationId, parallelisms.get(i), totalParallelism);\n setAllSubtasksReady(coordinator, receivingTasks.get(i), parallelisms.get(i));\n coordinators.add(coordinator);\n }\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(2, j, false)));\n checkSentEvent(1, new GloballyAlignedEvent(2, false), receivingTasks, parallelisms);\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(3, 0, false)));\n checkSentEvent(2, new GloballyAlignedEvent(3, true), receivingTasks, parallelisms);\n }\n\n private HeadOperatorCoordinator createCoordinator(\n IterationID iterationId, int parallelism, int totalHeadParallelism) {\n MockOperatorCoordinatorContext context =\n new MockOperatorCoordinatorContext(new OperatorID(), parallelism);\n return (HeadOperatorCoordinator)\n new HeadOperatorCoordinator.HeadOperatorCoordinatorProvider(\n new OperatorID(), iterationId, totalHeadParallelism)\n .create(context);\n }\n\n private void setAllSubtasksReady(\n HeadOperatorCoordinator coordinator,\n EventReceivingTasks receivingTasks,\n int parallelism) {\n for (int i = 0; i < parallelism; i++) {\n coordinator.executionAttemptReady(i, 0, receivingTasks.createGatewayForSubtask(i, 0));\n }\n }\n\n private void receiveEvent(\n List coordinators,\n List parallelisms,\n BiFunction> eventFactory)\n throws Exception {\n for (int i = 0; i < coordinators.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n List events = eventFactory.apply(i, j);\n for (OperatorEvent event 
: events) {\n coordinators.get(i).handleEventFromOperator(j, 0, event);\n }\n }\n }\n }\n\n private void checkSentEvent(\n int expectedNumEvents,\n GloballyAlignedEvent expectedLastEvent,\n List receivingTasks,\n List parallelisms)\n throws InterruptedException {\n for (int i = 0; i < parallelisms.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n while (true) {\n List events = receivingTasks.get(i).getSentEventsForSubtask(j);\n if (events.size() < expectedNumEvents) {\n Thread.sleep(50);\n continue;\n }\n\n assertEquals(expectedLastEvent, events.get(events.size() - 1));\n break;\n }\n }\n }\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_78_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n" + }, + { + "chunk_id": "doc_78_chunk_1", + "original_index": 1, + "content": "package org.apache.flink.iteration.operator.coordinator;\n\nimport org.apache.flink.iteration.IterationID;\nimport org.apache.flink.iteration.operator.event.GloballyAlignedEvent;\nimport org.apache.flink.iteration.operator.event.SubtaskAlignedEvent;\nimport org.apache.flink.runtime.jobgraph.OperatorID;\nimport org.apache.flink.runtime.operators.coordination.EventReceivingTasks;\nimport org.apache.flink.runtime.operators.coordination.MockOperatorCoordinatorContext;\nimport org.apache.flink.runtime.operators.coordination.OperatorEvent;\nimport org.apache.flink.util.TestLogger;\n\nimport org.junit.Test;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.function.BiFunction;\n\nimport static org.junit.Assert.assertEquals;\n\n/** Tests the behavior of {@link HeadOperatorCoordinator}. 
*/\npublic class HeadOperatorCoordinatorTest extends TestLogger {\n\n" + }, + { + "chunk_id": "doc_78_chunk_2", + "original_index": 2, + "content": " @Test(timeout = 60000L)\n public void testForwardEvents() throws Exception {\n IterationID iterationId = new IterationID();\n List operatorIds = Arrays.asList(new OperatorID(), new OperatorID());\n List parallelisms = Arrays.asList(2, 3);\n List receivingTasks =\n Arrays.asList(\n EventReceivingTasks.createForRunningTasks(),\n EventReceivingTasks.createForRunningTasks());\n List coordinators = new ArrayList<>();\n\n" + }, + { + "chunk_id": "doc_78_chunk_3", + "original_index": 3, + "content": " int totalParallelism = parallelisms.stream().mapToInt(i -> i).sum();\n\n for (int i = 0; i < operatorIds.size(); ++i) {\n HeadOperatorCoordinator coordinator =\n createCoordinator(iterationId, parallelisms.get(i), totalParallelism);\n setAllSubtasksReady(coordinator, receivingTasks.get(i), parallelisms.get(i));\n coordinators.add(coordinator);\n }\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(2, j, false)));\n checkSentEvent(1, new GloballyAlignedEvent(2, false), receivingTasks, parallelisms);\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(3, 0, false)));\n checkSentEvent(2, new GloballyAlignedEvent(3, true), receivingTasks, parallelisms);\n }\n\n" + }, + { + "chunk_id": "doc_78_chunk_4", + "original_index": 4, + "content": " private HeadOperatorCoordinator createCoordinator(\n IterationID iterationId, int parallelism, int totalHeadParallelism) {\n MockOperatorCoordinatorContext context =\n new MockOperatorCoordinatorContext(new OperatorID(), parallelism);\n return (HeadOperatorCoordinator)\n new HeadOperatorCoordinator.HeadOperatorCoordinatorProvider(\n new OperatorID(), iterationId, totalHeadParallelism)\n .create(context);\n }\n\n private void setAllSubtasksReady(\n HeadOperatorCoordinator coordinator,\n EventReceivingTasks receivingTasks,\n int parallelism) {\n for (int i = 0; i < parallelism; i++) {\n coordinator.executionAttemptReady(i, 0, receivingTasks.createGatewayForSubtask(i, 0));\n }\n }\n\n" + }, + { + "chunk_id": "doc_78_chunk_5", + "original_index": 5, + "content": " private void receiveEvent(\n List coordinators,\n List parallelisms,\n BiFunction> eventFactory)\n throws Exception {\n for (int i = 0; i < coordinators.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n List events = eventFactory.apply(i, j);\n for (OperatorEvent event : events) {\n coordinators.get(i).handleEventFromOperator(j, 0, event);\n }\n }\n }\n }\n\n" + }, + { + "chunk_id": "doc_78_chunk_6", + "original_index": 6, + "content": " private void checkSentEvent(\n int expectedNumEvents,\n GloballyAlignedEvent expectedLastEvent,\n List receivingTasks,\n List parallelisms)\n throws InterruptedException {\n for (int i = 0; i < parallelisms.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n while (true) {\n List events = receivingTasks.get(i).getSentEventsForSubtask(j);\n if (events.size() < expectedNumEvents) {\n Thread.sleep(50);\n continue;\n }\n\n assertEquals(expectedLastEvent, events.get(events.size() - 1));\n break;\n }\n }\n }\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_79", + "original_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. 
See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. */\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n // Generates input data.\n DataStream inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n // Creates a StopWordsRemover object and initializes its parameters.\n StopWordsRemover remover =\n new StopWordsRemover().setInputCols(\"input\").setOutputCols(\"output\");\n\n // Uses the StopWordsRemover object for feature transformations.\n Table outputTable = remover.transform(inputTable)[0];\n\n // Extracts and displays the results.\n for (CloseableIterator it = outputTable.execute().collect(); it.hasNext(); ) {\n Row row = it.next();\n\n String[] inputValues = row.getFieldAs(\"input\");\n String[] outputValues = row.getFieldAs(\"output\");\n\n System.out.printf(\n \"Input Values: %s\\tOutput Values: %s\\n\",\n Arrays.toString(inputValues), Arrays.toString(outputValues));\n }\n }\n}\n", + "chunks": [ + { + "chunk_id": "doc_79_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n" + }, + { + "chunk_id": "doc_79_chunk_1", + "original_index": 1, + "content": "package org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. */\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n" + }, + { + "chunk_id": "doc_79_chunk_2", + "original_index": 2, + "content": " // Generates input data.\n DataStream inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n" + }, + { + "chunk_id": "doc_79_chunk_3", + "original_index": 3, + "content": " // Creates a StopWordsRemover object and initializes its parameters.\n StopWordsRemover remover =\n new StopWordsRemover().setInputCols(\"input\").setOutputCols(\"output\");\n\n // Uses the StopWordsRemover object for feature transformations.\n Table outputTable = remover.transform(inputTable)[0];\n\n // Extracts and displays the results.\n for (CloseableIterator it = outputTable.execute().collect(); it.hasNext(); ) {\n Row row = it.next();\n\n String[] inputValues = row.getFieldAs(\"input\");\n String[] outputValues = row.getFieldAs(\"output\");\n\n System.out.printf(\n \"Input Values: %s\\tOutput Values: %s\\n\",\n Arrays.toString(inputValues), Arrays.toString(outputValues));\n }\n }\n}\n" + } + ] + }, + { + "doc_id": "doc_80", + "original_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", + "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n# Simple program that creates an IndexToStringModel instance and uses it\n# for feature engineering.\n\nfrom pyflink.common import Types\nfrom pyflink.datastream import StreamExecutionEnvironment\nfrom pyflink.ml.feature.stringindexer import IndexToStringModel\nfrom pyflink.table import StreamTableEnvironment\n\n# create a new StreamExecutionEnvironment\nenv = StreamExecutionEnvironment.get_execution_environment()\n\n# create a StreamTableEnvironment\nt_env = StreamTableEnvironment.create(env)\n\n# generate input data\npredict_table = t_env.from_data_stream(\n env.from_collection([\n (0, 3),\n (1, 2),\n ],\n type_info=Types.ROW_NAMED(\n ['input_col1', 'input_col2'],\n [Types.INT(), Types.INT()])\n ))\n\n# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n# extract and display the results\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = result[field_names.index(model.get_input_cols()[i])]\n output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n", + "chunks": [ + { + "chunk_id": "doc_80_chunk_0", + "original_index": 0, + "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n" + }, + { + "chunk_id": "doc_80_chunk_1", + "original_index": 1, + "content": "# Simple program that creates an IndexToStringModel instance and uses it\n# for feature engineering.\n\nfrom pyflink.common import Types\nfrom pyflink.datastream import StreamExecutionEnvironment\nfrom pyflink.ml.feature.stringindexer import IndexToStringModel\nfrom pyflink.table import StreamTableEnvironment\n\n# create a new StreamExecutionEnvironment\nenv = StreamExecutionEnvironment.get_execution_environment()\n\n# create a StreamTableEnvironment\nt_env = StreamTableEnvironment.create(env)\n\n# generate input data\npredict_table = t_env.from_data_stream(\n env.from_collection([\n (0, 3),\n (1, 2),\n ],\n type_info=Types.ROW_NAMED(\n ['input_col1', 'input_col2'],\n [Types.INT(), Types.INT()])\n ))\n\n" + }, + { + "chunk_id": "doc_80_chunk_2", + "original_index": 2, + "content": "# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n" + }, + { + "chunk_id": "doc_80_chunk_3", + "original_index": 3, + "content": "# extract and display the results\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = result[field_names.index(model.get_input_cols()[i])]\n output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n" + } + ] + }, + { + "doc_id": "doc_81", + "original_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\nusing namespace LOG4CXX_NS::pattern;\nusing namespace LOG4CXX_NS::spi;\nusing namespace LOG4CXX_NS::helpers;\n\nIMPLEMENT_LOG4CXX_OBJECT(ColorEndPatternConverter)\n\nColorEndPatternConverter::ColorEndPatternConverter() :\n\tLoggingEventPatternConverter(LOG4CXX_STR(\"Color End\"),\n\t\tLOG4CXX_STR(\"colorEnd\"))\n{\n}\n\nPatternConverterPtr ColorEndPatternConverter::newInstance(\n\tconst std::vector& /* options */)\n{\n\tstatic WideLife instance = std::make_shared();\n\treturn instance;\n}\n\nvoid ColorEndPatternConverter::format(\n\tconst LoggingEventPtr& event,\n\tLogString& toAppendTo,\n\tPool& p) const\n{\n\n\t// Reset all colors on the output(code 0)\n\t// Code 39 would be to reset colors only\n\ttoAppendTo.append(LOG4CXX_STR(\"\\x1B[0m\"));\n}\n", + "chunks": [ + { + "chunk_id": "doc_81_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n" + }, + { + "chunk_id": "doc_81_chunk_1", + "original_index": 1, + "content": "#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\nusing namespace LOG4CXX_NS::pattern;\nusing namespace LOG4CXX_NS::spi;\nusing namespace LOG4CXX_NS::helpers;\n\nIMPLEMENT_LOG4CXX_OBJECT(ColorEndPatternConverter)\n\nColorEndPatternConverter::ColorEndPatternConverter() :\n\tLoggingEventPatternConverter(LOG4CXX_STR(\"Color End\"),\n\t\tLOG4CXX_STR(\"colorEnd\"))\n{\n}\n\n" + }, + { + "chunk_id": "doc_81_chunk_2", + "original_index": 2, + "content": "PatternConverterPtr ColorEndPatternConverter::newInstance(\n\tconst std::vector& /* options */)\n{\n\tstatic WideLife instance = std::make_shared();\n\treturn instance;\n}\n\nvoid ColorEndPatternConverter::format(\n\tconst LoggingEventPtr& event,\n\tLogString& toAppendTo,\n\tPool& p) const\n{\n\n\t// Reset all colors on the output(code 0)\n\t// Code 39 would be to reset colors only\n\ttoAppendTo.append(LOG4CXX_STR(\"\\x1B[0m\"));\n}\n" + } + ] + }, + { + "doc_id": "doc_82", + "original_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", + "chunks": [ + { + "chunk_id": "doc_82_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n" + }, + { + "chunk_id": "doc_82_chunk_1", + "original_index": 1, + "content": "#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n" + } + ] + }, + { + "doc_id": "doc_83", + "original_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#define LOG4CXX_TEST 1\n#include \n\n#include \n#include \"../appenderskeletontestcase.h\"\n#include \n#include \n#include \n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace net\n{\n\nclass MockTriggeringEventEvaluator :\n\tpublic virtual spi::TriggeringEventEvaluator\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(MockTriggeringEventEvaluator)\n\t\tLOG4CXX_CAST_ENTRY(spi::TriggeringEventEvaluator)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tMockTriggeringEventEvaluator()\n\t\t{\n\t\t}\n\n\t\tbool isTriggeringEvent(const spi::LoggingEventPtr& event) override\n\t\t{\n\t\t\treturn true;\n\t\t}\n\tprivate:\n\t\tMockTriggeringEventEvaluator(const MockTriggeringEventEvaluator&);\n\t\tMockTriggeringEventEvaluator& operator=(const MockTriggeringEventEvaluator&);\n};\n}\n}\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\nusing namespace log4cxx::net;\n\nIMPLEMENT_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\n\n/**\n Unit tests of log4cxx::SocketAppender\n */\nclass SMTPAppenderTestCase : public AppenderSkeletonTestCase\n{\n\t\tLOGUNIT_TEST_SUITE(SMTPAppenderTestCase);\n\t\t//\n\t\t// tests inherited from AppenderSkeletonTestCase\n\t\t//\n\t\tLOGUNIT_TEST(testDefaultThreshold);\n\t\tLOGUNIT_TEST(testSetOptionThreshold);\n\t\tLOGUNIT_TEST(testTrigger);\n\t\tLOGUNIT_TEST(testInvalid);\n//#define LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n#ifdef LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n\t\t// This test requires the following environment variables:\n\t\t// LOG4CXX_TEST_EMAIL_RECIPIENT - where the email is sent\n\t\t// LOG4CXX_TEST_SMTP_HOST_NAME - the email server\n\t\tLOGUNIT_TEST(testValid);\n#endif\n\t\tLOGUNIT_TEST_SUITE_END();\n\n\n\tpublic:\n\n\t\tAppenderSkeleton* createAppenderSkeleton() const\n\t\t{\n\t\t\treturn new log4cxx::net::SMTPAppender();\n\t\t}\n\n\t\tvoid setUp()\n\t\t{\n\t\t}\n\n\t\tvoid tearDown()\n\t\t{\n\t\t\tLogManager::resetConfiguration();\n\t\t}\n\n\t\t/**\n\t\t * Tests that triggeringPolicy element will set evaluator.\n\t\t */\n\t\tvoid testTrigger()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppender1.xml\");\n\t\t\tauto appender = log4cxx::cast(Logger::getRootLogger()->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto evaluator = appender->getEvaluator();\n\t\t\tLOGUNIT_ASSERT(evaluator);\n\t\t\tLOGUNIT_ASSERT_EQUAL(true, evaluator->instanceof(MockTriggeringEventEvaluator::getStaticClass()));\n\t\t}\n\n\t\tvoid testInvalid()\n\t\t{\n\t\t\tauto appender = std::make_shared();\n\t\t\tappender->setSMTPHost(LOG4CXX_STR(\"smtp.invalid\"));\n\t\t\tappender->setTo(LOG4CXX_STR(\"you@example.invalid\"));\n\t\t\tappender->setFrom(LOG4CXX_STR(\"me@example.invalid\"));\n\t\t\tappender->setLayout(std::make_shared());\n\t\t\tPool p;\n\t\t\tappender->activateOptions(p);\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\troot->addAppender(appender);\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\");\n\t\t\tLOG4CXX_ERROR(root, \"Sending Message\"); // The DefaultEvaluator should trigger e-mail 
generation\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(eh->errorReported());\n\t\t}\n\n\n\t\tvoid testValid()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppenderValid.xml\");\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\\n\\nThis paragraph should be preceeded by a blank line.\");\n\n\t\t\tauto appender = log4cxx::cast(root->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(!eh->errorReported());\n\t\t}\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(SMTPAppenderTestCase);\n\n", + "chunks": [ + { + "chunk_id": "doc_83_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n" + }, + { + "chunk_id": "doc_83_chunk_1", + "original_index": 1, + "content": "#define LOG4CXX_TEST 1\n#include \n\n#include \n#include \"../appenderskeletontestcase.h\"\n#include \n#include \n#include \n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace net\n{\n\nclass MockTriggeringEventEvaluator :\n\tpublic virtual spi::TriggeringEventEvaluator\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(MockTriggeringEventEvaluator)\n\t\tLOG4CXX_CAST_ENTRY(spi::TriggeringEventEvaluator)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n" + }, + { + "chunk_id": "doc_83_chunk_2", + "original_index": 2, + "content": "\t\tMockTriggeringEventEvaluator()\n\t\t{\n\t\t}\n\n\t\tbool isTriggeringEvent(const spi::LoggingEventPtr& event) override\n\t\t{\n\t\t\treturn true;\n\t\t}\n\tprivate:\n\t\tMockTriggeringEventEvaluator(const MockTriggeringEventEvaluator&);\n\t\tMockTriggeringEventEvaluator& operator=(const MockTriggeringEventEvaluator&);\n};\n}\n}\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\nusing namespace log4cxx::net;\n\nIMPLEMENT_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\n" + }, + { + "chunk_id": "doc_83_chunk_3", + "original_index": 3, + "content": "\n/**\n Unit tests of log4cxx::SocketAppender\n */\nclass SMTPAppenderTestCase : public AppenderSkeletonTestCase\n{\n\t\tLOGUNIT_TEST_SUITE(SMTPAppenderTestCase);\n\t\t//\n\t\t// tests inherited from AppenderSkeletonTestCase\n\t\t//\n\t\tLOGUNIT_TEST(testDefaultThreshold);\n\t\tLOGUNIT_TEST(testSetOptionThreshold);\n\t\tLOGUNIT_TEST(testTrigger);\n\t\tLOGUNIT_TEST(testInvalid);\n//#define LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n#ifdef LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n\t\t// This test requires the following environment variables:\n\t\t// LOG4CXX_TEST_EMAIL_RECIPIENT - where the email is sent\n\t\t// 
LOG4CXX_TEST_SMTP_HOST_NAME - the email server\n\t\tLOGUNIT_TEST(testValid);\n#endif\n\t\tLOGUNIT_TEST_SUITE_END();\n\n" + }, + { + "chunk_id": "doc_83_chunk_4", + "original_index": 4, + "content": "\n\tpublic:\n\n\t\tAppenderSkeleton* createAppenderSkeleton() const\n\t\t{\n\t\t\treturn new log4cxx::net::SMTPAppender();\n\t\t}\n\n\t\tvoid setUp()\n\t\t{\n\t\t}\n\n\t\tvoid tearDown()\n\t\t{\n\t\t\tLogManager::resetConfiguration();\n\t\t}\n\n\t\t/**\n\t\t * Tests that triggeringPolicy element will set evaluator.\n\t\t */\n\t\tvoid testTrigger()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppender1.xml\");\n\t\t\tauto appender = log4cxx::cast(Logger::getRootLogger()->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto evaluator = appender->getEvaluator();\n\t\t\tLOGUNIT_ASSERT(evaluator);\n\t\t\tLOGUNIT_ASSERT_EQUAL(true, evaluator->instanceof(MockTriggeringEventEvaluator::getStaticClass()));\n\t\t}\n\n" + }, + { + "chunk_id": "doc_83_chunk_5", + "original_index": 5, + "content": "\t\tvoid testInvalid()\n\t\t{\n\t\t\tauto appender = std::make_shared();\n\t\t\tappender->setSMTPHost(LOG4CXX_STR(\"smtp.invalid\"));\n\t\t\tappender->setTo(LOG4CXX_STR(\"you@example.invalid\"));\n\t\t\tappender->setFrom(LOG4CXX_STR(\"me@example.invalid\"));\n\t\t\tappender->setLayout(std::make_shared());\n\t\t\tPool p;\n\t\t\tappender->activateOptions(p);\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\troot->addAppender(appender);\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\");\n\t\t\tLOG4CXX_ERROR(root, \"Sending Message\"); // The DefaultEvaluator should trigger e-mail generation\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(eh->errorReported());\n\t\t}\n\n" + }, + { + "chunk_id": "doc_83_chunk_6", + "original_index": 6, + "content": "\n\t\tvoid testValid()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppenderValid.xml\");\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\\n\\nThis paragraph should be preceeded by a blank line.\");\n\n\t\t\tauto appender = log4cxx::cast(root->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(!eh->errorReported());\n\t\t}\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(SMTPAppenderTestCase);\n\n" + } + ] + }, + { + "doc_id": "doc_84", + "original_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\nvoid BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", + "chunks": [ + { + "chunk_id": "doc_84_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n" + }, + { + "chunk_id": "doc_84_chunk_1", + "original_index": 1, + "content": " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\n" + }, + { + "chunk_id": "doc_84_chunk_2", + "original_index": 2, + "content": "void BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n" + } + ] + }, + { + "doc_id": "doc_85", + "original_uuid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include \n/* Prevent error C2491: 'std::numpunct<_Elem>::id': definition of dllimport static data member not allowed */\n#if defined(_MSC_VER) && (LOG4CXX_UNICHAR_API || LOG4CXX_LOGCHAR_IS_UNICHAR)\n#define __FORCE_INSTANCE\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\ntypedef std::basic_stringstream LogStream;\n\nLogString LOG4CXX_NS::hexdump(const void* bytes, uint32_t len, HexdumpFlags flags){\n\tLogString ret;\n\tconst uint8_t* bytes_u8 = static_cast(bytes);\n\tLogStream sstream;\n#if LOG4CXX_LOGCHAR_IS_WCHAR\n\tconst wchar_t fill_char = L'0';\n\tconst wchar_t space_fill_char = L' ';\n#else\n\tconst logchar fill_char = '0';\n\tconst logchar space_fill_char = ' ';\n#endif\n\n\tif(flags & HexdumpFlags::AddStartingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\tfor(uint32_t offset = 0; offset < len; offset += 16){\n\t\tif(offset != 0){\n\t\t\tsstream << LOG4CXX_EOL;\n\t\t}\n\n\t\t// Print out the offset\n\t\tsstream << std::hex << std::setw(8) << std::setfill(fill_char) << offset << std::resetiosflags(std::ios_base::fmtflags(0));\n\n\t\tsstream << std::setw(0) << LOG4CXX_STR(\" \");\n\n\t\t// Print out the first 8 bytes\n\t\tfor(int byte = 0; byte < 8; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\tif(byte != 8){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tsstream << std::setfill(space_fill_char);\n\t\t\tif(byte != 8){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n\t\tsstream << LOG4CXX_STR(\" \");\n\n\t\t// Print out the last 8 bytes\n\t\tfor(int byte = 8; byte < 16; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\tif(byte != 15){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tif(byte != 15){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n\t\t// Print out the ASCII text\n\t\tsstream << LOG4CXX_STR(\" |\");\n\t\tfor(int byte = 0; byte < 16; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tif(std::isprint(bytes_u8[offset + byte])){\n\t\t\t\tlogchar to_append = bytes_u8[offset + byte];\n\t\t\t\tsstream << to_append;\n\t\t\t}else{\n\t\t\t\tsstream << LOG4CXX_STR(\".\");\n\t\t\t}\n\t\t}\n\t\tsstream << LOG4CXX_STR(\"|\");\n\t}\n\n\tif(flags & HexdumpFlags::AddEndingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\treturn sstream.str();\n}\n", + "chunks": [ + { + "chunk_id": "doc_85_chunk_0", + 
"original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n" + }, + { + "chunk_id": "doc_85_chunk_1", + "original_index": 1, + "content": "#include \n/* Prevent error C2491: 'std::numpunct<_Elem>::id': definition of dllimport static data member not allowed */\n#if defined(_MSC_VER) && (LOG4CXX_UNICHAR_API || LOG4CXX_LOGCHAR_IS_UNICHAR)\n#define __FORCE_INSTANCE\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\ntypedef std::basic_stringstream LogStream;\n\nLogString LOG4CXX_NS::hexdump(const void* bytes, uint32_t len, HexdumpFlags flags){\n\tLogString ret;\n\tconst uint8_t* bytes_u8 = static_cast(bytes);\n\tLogStream sstream;\n#if LOG4CXX_LOGCHAR_IS_WCHAR\n\tconst wchar_t fill_char = L'0';\n\tconst wchar_t space_fill_char = L' ';\n#else\n\tconst logchar fill_char = '0';\n\tconst logchar space_fill_char = ' ';\n#endif\n\n" + }, + { + "chunk_id": "doc_85_chunk_2", + "original_index": 2, + "content": "\tif(flags & HexdumpFlags::AddStartingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\tfor(uint32_t offset = 0; offset < len; offset += 16){\n\t\tif(offset != 0){\n\t\t\tsstream << LOG4CXX_EOL;\n\t\t}\n\n\t\t// Print out the offset\n\t\tsstream << std::hex << std::setw(8) << std::setfill(fill_char) << offset << std::resetiosflags(std::ios_base::fmtflags(0));\n\n\t\tsstream << std::setw(0) << LOG4CXX_STR(\" \");\n\n\t\t// Print out the first 8 bytes\n\t\tfor(int byte = 0; byte < 8; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\tif(byte != 8){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tsstream << std::setfill(space_fill_char);\n\t\t\tif(byte != 8){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n" + }, + { + "chunk_id": "doc_85_chunk_3", + "original_index": 3, + "content": "\t\tsstream << LOG4CXX_STR(\" \");\n\n\t\t// Print out the last 8 bytes\n\t\tfor(int byte = 8; byte < 16; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\tif(byte != 15){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tif(byte != 15){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n\t\t// Print out the ASCII text\n\t\tsstream << LOG4CXX_STR(\" |\");\n\t\tfor(int byte = 0; byte < 16; byte++){\n\t\t\tif(offset + byte >= 
len){\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tif(std::isprint(bytes_u8[offset + byte])){\n\t\t\t\tlogchar to_append = bytes_u8[offset + byte];\n\t\t\t\tsstream << to_append;\n\t\t\t}else{\n\t\t\t\tsstream << LOG4CXX_STR(\".\");\n\t\t\t}\n\t\t}\n\t\tsstream << LOG4CXX_STR(\"|\");\n\t}\n\n\tif(flags & HexdumpFlags::AddEndingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\treturn sstream.str();\n}\n" + } + ] + }, + { + "doc_id": "doc_86", + "original_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", + "chunks": [ + { + "chunk_id": "doc_86_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\n" + }, + { + "chunk_id": "doc_86_chunk_1", + "original_index": 1, + "content": "using namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n" + } + ] + }, + { + "doc_id": "doc_87", + "original_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \n#include \n#include \n#include \n\n#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/threadfilter.h\"\n#include \n#include \n#include \n#include \n#include \"testchar.h\"\n#include \n#include \n#include \n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(MinimumTestCase)\n{\n\tLOGUNIT_TEST_SUITE(MinimumTestCase);\n\tLOGUNIT_TEST(simple);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\troot->removeAllAppenders();\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\tvoid simple()\n\t{\n\t\tLayoutPtr layout = LayoutPtr(new SimpleLayout());\n\t\tAppenderPtr appender = FileAppenderPtr(new FileAppender(layout, LOG4CXX_STR(\"output/simple\"), false));\n\t\troot->addAppender(appender);\n\t\tcommon();\n\n\t\tLOGUNIT_ASSERT(Compare::compare(LOG4CXX_FILE(\"output/simple\"), LOG4CXX_FILE(\"witness/simple\")));\n\t}\n\n\tstd::string createMessage(int i, Pool & pool)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = 0;\n\n\t\t// In the lines below, the logger names are chosen as an aid in\n\t\t// remembering their level values. 
In general, the logger names\n\t\t// have no bearing to level values.\n\t\tLoggerPtr ERRlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"ERR\"));\n\t\tERRlogger->setLevel(Level::getError());\n\n\t\tLoggerPtr INF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF\"));\n\t\tINF->setLevel(Level::getInfo());\n\n\t\tLoggerPtr INF_ERR = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR\"));\n\t\tINF_ERR->setLevel(Level::getError());\n\n\t\tLoggerPtr DEB = Logger::getLogger(LOG4CXX_TEST_STR(\"DEB\"));\n\t\tDEB->setLevel(Level::getDebug());\n\n\t\t// Note: categories with undefined level\n\t\tLoggerPtr INF_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.UNDEF\"));\n\t\tLoggerPtr INF_ERR_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR.UNDEF\"));\n\t\tLoggerPtr UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"UNDEF\"));\n\n\t\tstd::string msg(\"Message \");\n\n\t\tPool pool;\n\n\t\t// These should all log.----------------------------\n\t\tLOG4CXX_FATAL(ERRlogger, createMessage(i, pool));\n\t\ti++; //0\n\t\tLOG4CXX_ERROR(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF, createMessage(i, pool));\n\t\ti++; // 2\n\t\tLOG4CXX_ERROR(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_UNDEF, createMessage(i, pool));\n\t\ti++; //6\n\t\tLOG4CXX_ERROR(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR, createMessage(i, pool));\n\t\ti++; // 10\n\t\tLOG4CXX_ERROR(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_ERROR(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(DEB, createMessage(i, pool));\n\t\ti++; //14\n\t\tLOG4CXX_ERROR(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(DEB, createMessage(i, pool));\n\t\ti++;\n\n\t\t// defaultLevel=DEBUG\n\t\tLOG4CXX_FATAL(UNDEF, createMessage(i, pool));\n\t\ti++; // 19\n\t\tLOG4CXX_ERROR(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\t// The following should not log\n\t\tLOG4CXX_WARN(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_DEBUG(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_WARN(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\tLOG4CXX_INFO(INF, LOG4CXX_TEST_STR(\"Messages should bear numbers 0 through 23.\"));\n\t}\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\nprivate:\n\tstatic const File FILTERED;\n};\n\n\nconst File 
MinimumTestCase::FILTERED(\"output/minimumfiltered\");\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(MinimumTestCase);\n", + "chunks": [ + { + "chunk_id": "doc_87_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \n#include \n#include \n#include \n\n" + }, + { + "chunk_id": "doc_87_chunk_1", + "original_index": 1, + "content": "#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/threadfilter.h\"\n#include \n#include \n#include \n#include \n#include \"testchar.h\"\n#include \n#include \n#include \n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(MinimumTestCase)\n{\n\tLOGUNIT_TEST_SUITE(MinimumTestCase);\n\tLOGUNIT_TEST(simple);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\troot->removeAllAppenders();\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n" + }, + { + "chunk_id": "doc_87_chunk_2", + "original_index": 2, + "content": "\tvoid simple()\n\t{\n\t\tLayoutPtr layout = LayoutPtr(new SimpleLayout());\n\t\tAppenderPtr appender = FileAppenderPtr(new FileAppender(layout, LOG4CXX_STR(\"output/simple\"), false));\n\t\troot->addAppender(appender);\n\t\tcommon();\n\n\t\tLOGUNIT_ASSERT(Compare::compare(LOG4CXX_FILE(\"output/simple\"), LOG4CXX_FILE(\"witness/simple\")));\n\t}\n\n\tstd::string createMessage(int i, Pool & pool)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = 0;\n\n\t\t// In the lines below, the logger names are chosen as an aid in\n\t\t// remembering their level values. 
In general, the logger names\n\t\t// have no bearing to level values.\n\t\tLoggerPtr ERRlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"ERR\"));\n\t\tERRlogger->setLevel(Level::getError());\n\n" + }, + { + "chunk_id": "doc_87_chunk_3", + "original_index": 3, + "content": "\t\tLoggerPtr INF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF\"));\n\t\tINF->setLevel(Level::getInfo());\n\n\t\tLoggerPtr INF_ERR = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR\"));\n\t\tINF_ERR->setLevel(Level::getError());\n\n\t\tLoggerPtr DEB = Logger::getLogger(LOG4CXX_TEST_STR(\"DEB\"));\n\t\tDEB->setLevel(Level::getDebug());\n\n\t\t// Note: categories with undefined level\n\t\tLoggerPtr INF_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.UNDEF\"));\n\t\tLoggerPtr INF_ERR_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR.UNDEF\"));\n\t\tLoggerPtr UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"UNDEF\"));\n\n" + }, + { + "chunk_id": "doc_87_chunk_4", + "original_index": 4, + "content": "\t\tstd::string msg(\"Message \");\n\n\t\tPool pool;\n\n\t\t// These should all log.----------------------------\n\t\tLOG4CXX_FATAL(ERRlogger, createMessage(i, pool));\n\t\ti++; //0\n\t\tLOG4CXX_ERROR(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF, createMessage(i, pool));\n\t\ti++; // 2\n\t\tLOG4CXX_ERROR(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_UNDEF, createMessage(i, pool));\n\t\ti++; //6\n\t\tLOG4CXX_ERROR(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR, createMessage(i, pool));\n\t\ti++; // 10\n\t\tLOG4CXX_ERROR(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_ERROR(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n" + }, + { + "chunk_id": "doc_87_chunk_5", + "original_index": 5, + "content": "\t\tLOG4CXX_FATAL(DEB, createMessage(i, pool));\n\t\ti++; //14\n\t\tLOG4CXX_ERROR(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(DEB, createMessage(i, pool));\n\t\ti++;\n\n\t\t// defaultLevel=DEBUG\n\t\tLOG4CXX_FATAL(UNDEF, createMessage(i, pool));\n\t\ti++; // 19\n\t\tLOG4CXX_ERROR(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\t// The following should not log\n\t\tLOG4CXX_WARN(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_DEBUG(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n" + }, + { + "chunk_id": "doc_87_chunk_6", + "original_index": 6, + "content": "\t\tLOG4CXX_WARN(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR_UNDEF, createMessage(i, 
pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\tLOG4CXX_INFO(INF, LOG4CXX_TEST_STR(\"Messages should bear numbers 0 through 23.\"));\n\t}\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\nprivate:\n\tstatic const File FILTERED;\n};\n\n\nconst File MinimumTestCase::FILTERED(\"output/minimumfiltered\");\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(MinimumTestCase);\n" + } + ] + }, + { + "doc_id": "doc_88", + "original_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", + "chunks": [ + { + "chunk_id": "doc_88_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n" + }, + { + "chunk_id": "doc_88_chunk_1", + "original_index": 1, + "content": "#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n" + } + ] + }, + { + "doc_id": "doc_89", + "original_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \"testchar.h\"\n#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/iso8601filter.h\"\n#include \"util/absolutetimefilter.h\"\n#include \"util/relativetimefilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/threadfilter.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/filenamefilter.h\"\n#include \"vectorappender.h\"\n#include \n#include \n#include \n#include \n#include \n#include \n\n#define REGEX_STR(x) x\n#define PAT0 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* - Message [0-9]\\\\{1,2\\\\}\")\n#define PAT1 ISO8601_PAT REGEX_STR(\" \") PAT0\n#define PAT2 ABSOLUTE_DATE_AND_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT3 ABSOLUTE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT4 RELATIVE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT5 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* : Message [0-9]\\\\{1,2\\\\}\")\n#define PAT6 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) .*patternlayouttest.cpp\\\\([0-9]\\\\{1,4\\\\}\\\\): Message [0-9]\\\\{1,3\\\\}\")\n#define PAT11a REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ log4j.PatternLayoutTest: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT11b REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ root: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT12 REGEX_STR(\"^\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) \")\\\n\tREGEX_STR(\".*patternlayouttest.cpp([0-9]\\\\{1,4\\\\}): \")\\\n\tREGEX_STR(\"Message [0-9]\\\\{1,2\\\\}\")\n#define PAT_MDC_1 REGEX_STR(\"\")\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(FMTTestCase)\n{\n\tLOGUNIT_TEST_SUITE(FMTTestCase);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST(test1_expanded);\n\tLOGUNIT_TEST(test10);\n//\tLOGUNIT_TEST(test_date);\n\tLOGUNIT_TEST_SUITE_END();\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\tMDC::clear();\n\t\tlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"java.org.apache.log4j.PatternLayoutTest\"));\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tMDC::clear();\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\tvoid test1()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\tvoid test1_expanded()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1_expanded.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\tvoid test10()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout10.properties\"));\n\t\tcommon();\n\n\t\tControlFilter filter1;\n\t\tfilter1 << PAT6;\n\t\tThreadFilter filter2;\n\t\tLineNumberFilter filter3;\n\t\tFilenameFilter filenameFilter(__FILE__, \"patternlayouttest.cpp\");\n\n\n\t\tstd::vector 
filters;\n\t\tfilters.push_back(&filenameFilter);\n\t\tfilters.push_back(&filter1);\n\t\tfilters.push_back(&filter2);\n\t\tfilters.push_back(&filter3);\n\n\n\t\ttry\n\t\t{\n\t\t\tTransformer::transform(TEMP, FILTERED, filters);\n\t\t}\n\t\tcatch (UnexpectedFormatException& e)\n\t\t{\n\t\t\tstd::cout << \"UnexpectedFormatException :\" << e.what() << std::endl;\n\t\t\tthrow;\n\t\t}\n\n\t\tLOGUNIT_ASSERT(Compare::compare(FILTERED, LOG4CXX_FILE(\"witness/patternLayout.10\")));\n\t}\n\n\tvoid test_date(){\n\t\tstd::tm tm = {};\n\t\tstd::stringstream ss(\"2013-04-11 08:35:34\");\n\t\tss >> std::get_time(&tm, \"%Y-%m-%d %H:%M:%S\");\n\t\tauto tp = std::chrono::system_clock::from_time_t(std::mktime(&tm));\n\t\tuint64_t micros = std::chrono::duration_cast(tp.time_since_epoch()).count();\n\n\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction([micros](){\n\t\t\treturn micros;\n\t\t});\n\n\t\tlog4cxx::spi::LoggingEventPtr logEvt = std::make_shared(LOG4CXX_STR(\"foo\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t Level::getInfo(),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t LOG4CXX_STR(\"A Message\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t log4cxx::spi::LocationInfo::getLocationUnavailable());\n\t\tFMTLayout layout(LOG4CXX_STR(\"{d:%Y-%m-%d %H:%M:%S} {message}\"));\n\t\tLogString output;\n\t\tlog4cxx::helpers::Pool pool;\n\t\tlayout.format( output, logEvt, pool);\n\n\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction(nullptr);\n\n\t\tLOGUNIT_ASSERT_EQUAL(LOG4CXX_STR(\"2013-04-11 09:35:34 A Message\"), output);\n\t}\n\n\tstd::string createMessage(Pool & pool, int i)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = -1;\n\n\t\tPool pool;\n\n\n\t\tLOG4CXX_DEBUG(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_DEBUG(root, createMessage(pool, i));\n\n\t\tLOG4CXX_INFO(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_INFO(root, createMessage(pool, i));\n\n\t\tLOG4CXX_WARN(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_WARN(root, createMessage(pool, i));\n\n\t\tLOG4CXX_ERROR(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_ERROR(root, createMessage(pool, i));\n\n\t\tLOG4CXX_FATAL(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_FATAL(root, createMessage(pool, i));\n\t}\n\n\tprivate:\n\t\tstatic const LogString FILTERED;\n\t\tstatic const LogString TEMP;\n\n};\n\nconst LogString FMTTestCase::TEMP(LOG4CXX_STR(\"output/fmtlayout\"));\nconst LogString FMTTestCase::FILTERED(LOG4CXX_STR(\"output/fmtlayoutfiltered\"));\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(FMTTestCase);\n", + "chunks": [ + { + "chunk_id": "doc_89_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \"testchar.h\"\n" + }, + { + "chunk_id": "doc_89_chunk_1", + "original_index": 1, + "content": "#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/iso8601filter.h\"\n#include \"util/absolutetimefilter.h\"\n#include \"util/relativetimefilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/threadfilter.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/filenamefilter.h\"\n#include \"vectorappender.h\"\n#include \n#include \n#include \n#include \n#include \n#include \n\n#define REGEX_STR(x) x\n#define PAT0 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* - Message [0-9]\\\\{1,2\\\\}\")\n#define PAT1 ISO8601_PAT REGEX_STR(\" \") PAT0\n#define PAT2 ABSOLUTE_DATE_AND_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT3 ABSOLUTE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT4 RELATIVE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT5 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* : Message [0-9]\\\\{1,2\\\\}\")\n" + }, + { + "chunk_id": "doc_89_chunk_2", + "original_index": 2, + "content": "#define PAT6 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) .*patternlayouttest.cpp\\\\([0-9]\\\\{1,4\\\\}\\\\): Message [0-9]\\\\{1,3\\\\}\")\n#define PAT11a REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ log4j.PatternLayoutTest: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT11b REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ root: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT12 REGEX_STR(\"^\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) \")\\\n\tREGEX_STR(\".*patternlayouttest.cpp([0-9]\\\\{1,4\\\\}): \")\\\n\tREGEX_STR(\"Message [0-9]\\\\{1,2\\\\}\")\n#define PAT_MDC_1 REGEX_STR(\"\")\n\n" + }, + { + "chunk_id": "doc_89_chunk_3", + "original_index": 3, + "content": "using namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(FMTTestCase)\n{\n\tLOGUNIT_TEST_SUITE(FMTTestCase);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST(test1_expanded);\n\tLOGUNIT_TEST(test10);\n//\tLOGUNIT_TEST(test_date);\n\tLOGUNIT_TEST_SUITE_END();\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\tMDC::clear();\n\t\tlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"java.org.apache.log4j.PatternLayoutTest\"));\n\t}\n\n" + }, + { + "chunk_id": "doc_89_chunk_4", + "original_index": 4, + "content": "\tvoid tearDown()\n\t{\n\t\tMDC::clear();\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\tvoid test1()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\tvoid test1_expanded()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1_expanded.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n" + }, + { + "chunk_id": "doc_89_chunk_5", + 
"original_index": 5, + "content": "\tvoid test10()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout10.properties\"));\n\t\tcommon();\n\n\t\tControlFilter filter1;\n\t\tfilter1 << PAT6;\n\t\tThreadFilter filter2;\n\t\tLineNumberFilter filter3;\n\t\tFilenameFilter filenameFilter(__FILE__, \"patternlayouttest.cpp\");\n\n\n\t\tstd::vector filters;\n\t\tfilters.push_back(&filenameFilter);\n\t\tfilters.push_back(&filter1);\n\t\tfilters.push_back(&filter2);\n\t\tfilters.push_back(&filter3);\n\n" + }, + { + "chunk_id": "doc_89_chunk_6", + "original_index": 6, + "content": "\n\t\ttry\n\t\t{\n\t\t\tTransformer::transform(TEMP, FILTERED, filters);\n\t\t}\n\t\tcatch (UnexpectedFormatException& e)\n\t\t{\n\t\t\tstd::cout << \"UnexpectedFormatException :\" << e.what() << std::endl;\n\t\t\tthrow;\n\t\t}\n\n\t\tLOGUNIT_ASSERT(Compare::compare(FILTERED, LOG4CXX_FILE(\"witness/patternLayout.10\")));\n\t}\n\n\tvoid test_date(){\n\t\tstd::tm tm = {};\n\t\tstd::stringstream ss(\"2013-04-11 08:35:34\");\n\t\tss >> std::get_time(&tm, \"%Y-%m-%d %H:%M:%S\");\n\t\tauto tp = std::chrono::system_clock::from_time_t(std::mktime(&tm));\n\t\tuint64_t micros = std::chrono::duration_cast(tp.time_since_epoch()).count();\n\n\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction([micros](){\n\t\t\treturn micros;\n\t\t});\n\n" + }, + { + "chunk_id": "doc_89_chunk_7", + "original_index": 7, + "content": "\t\tlog4cxx::spi::LoggingEventPtr logEvt = std::make_shared(LOG4CXX_STR(\"foo\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t Level::getInfo(),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t LOG4CXX_STR(\"A Message\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t log4cxx::spi::LocationInfo::getLocationUnavailable());\n\t\tFMTLayout layout(LOG4CXX_STR(\"{d:%Y-%m-%d %H:%M:%S} {message}\"));\n\t\tLogString output;\n\t\tlog4cxx::helpers::Pool pool;\n\t\tlayout.format( output, logEvt, pool);\n\n" + }, + { + "chunk_id": "doc_89_chunk_8", + "original_index": 8, + "content": "\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction(nullptr);\n\n\t\tLOGUNIT_ASSERT_EQUAL(LOG4CXX_STR(\"2013-04-11 09:35:34 A Message\"), output);\n\t}\n\n\tstd::string createMessage(Pool & pool, int i)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = -1;\n\n\t\tPool pool;\n\n\n\t\tLOG4CXX_DEBUG(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_DEBUG(root, createMessage(pool, i));\n\n\t\tLOG4CXX_INFO(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_INFO(root, createMessage(pool, i));\n\n\t\tLOG4CXX_WARN(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_WARN(root, createMessage(pool, i));\n\n" + }, + { + "chunk_id": "doc_89_chunk_9", + "original_index": 9, + "content": "\t\tLOG4CXX_ERROR(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_ERROR(root, createMessage(pool, i));\n\n\t\tLOG4CXX_FATAL(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_FATAL(root, createMessage(pool, i));\n\t}\n\n\tprivate:\n\t\tstatic const LogString FILTERED;\n\t\tstatic const LogString TEMP;\n\n};\n\nconst LogString FMTTestCase::TEMP(LOG4CXX_STR(\"output/fmtlayout\"));\nconst LogString FMTTestCase::FILTERED(LOG4CXX_STR(\"output/fmtlayoutfiltered\"));\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(FMTTestCase);\n" + } + ] + }, + { + "doc_id": "doc_90", + "original_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#ifndef _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n#define _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n\n#include \n\nnamespace LOG4CXX_NS\n{\n\nnamespace helpers\n{\n\n/**\n* Writes text to a character-output stream buffering\n* requests to increase efficiency.\n*/\nclass LOG4CXX_EXPORT BufferedWriter : public Writer\n{\n\tprivate:\n\t\tLOG4CXX_DECLARE_PRIVATE_MEMBER_PTR(BufferedWriterPriv, m_priv)\n\n\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n", + "chunks": [ + { + "chunk_id": "doc_90_chunk_0", + "original_index": 0, + "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n" + }, + { + "chunk_id": "doc_90_chunk_1", + "original_index": 1, + "content": "#ifndef _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n#define _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n\n#include \n\nnamespace LOG4CXX_NS\n{\n\nnamespace helpers\n{\n\n/**\n* Writes text to a character-output stream buffering\n* requests to increase efficiency.\n*/\nclass LOG4CXX_EXPORT BufferedWriter : public Writer\n{\n\tprivate:\n\t\tLOG4CXX_DECLARE_PRIVATE_MEMBER_PTR(BufferedWriterPriv, m_priv)\n\n" + }, + { + "chunk_id": "doc_90_chunk_2", + "original_index": 2, + "content": "\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n" + } + ] + } +] \ No newline at end of file diff --git a/experiments/data/contextual-embeddings/original_data/evaluation_set.jsonl b/experiments/data/contextual-embeddings/original_data/evaluation_set.jsonl new file mode 100644 index 0000000..3357715 --- /dev/null +++ b/experiments/data/contextual-embeddings/original_data/evaluation_set.jsonl @@ -0,0 +1,248 @@ +{"query": "What is the purpose of the DiffExecutor struct?", "answer": "The DiffExecutor struct wraps a primary executor and a secondary executor. As stated in the comment, \"In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\" It is used for differential fuzzing, where the same input is run through both executors and the results are compared.", "golden_doc_uuids": ["5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145"], "golden_chunk_uuids": [["5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", 0]], "golden_documents": [{"uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! 
In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\nimpl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n /// Retrieve the primary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn primary(&mut self) -> &mut A {\n &mut self.primary\n }\n\n /// Retrieve the secondary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn secondary(&mut self) -> &mut B {\n &mut self.secondary\n }\n}\n\nimpl Executor for DiffExecutor\nwhere\n A: Executor + HasObservers,\n B: Executor + HasObservers,\n EM: UsesState,\n DOT: DifferentialObserversTuple,\n Z: UsesState,\n{\n fn run_target(\n &mut self,\n fuzzer: &mut Z,\n state: &mut Self::State,\n mgr: &mut EM,\n input: &Self::Input,\n ) -> Result {\n self.observers(); // update in advance\n let observers = self.observers.get_mut();\n observers\n .differential\n .pre_observe_first_all(observers.primary.as_mut())?;\n observers.primary.as_mut().pre_exec_all(state, input)?;\n let ret1 = self.primary.run_target(fuzzer, state, mgr, input)?;\n observers\n .primary\n .as_mut()\n .post_exec_all(state, input, &ret1)?;\n observers\n .differential\n .post_observe_first_all(observers.primary.as_mut())?;\n observers\n .differential\n .pre_observe_second_all(observers.secondary.as_mut())?;\n observers.secondary.as_mut().pre_exec_all(state, input)?;\n let ret2 = self.secondary.run_target(fuzzer, state, mgr, input)?;\n observers\n .secondary\n .as_mut()\n .post_exec_all(state, input, &ret2)?;\n observers\n .differential\n .post_observe_second_all(observers.secondary.as_mut())?;\n if ret1 == ret2 {\n Ok(ret1)\n } else {\n // We found a diff in the exit codes!\n Ok(ExitKind::Diff {\n primary: ret1.into(),\n secondary: ret2.into(),\n })\n }\n }\n}\n\n/// Proxy the observers of the inner executors\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(\n bound = \"A: serde::Serialize + serde::de::DeserializeOwned, B: serde::Serialize + serde::de::DeserializeOwned, DOT: serde::Serialize + serde::de::DeserializeOwned\"\n)]\npub struct ProxyObserversTuple {\n primary: OwnedMutPtr,\n secondary: OwnedMutPtr,\n differential: DOT,\n}\n\nimpl ObserversTuple for ProxyObserversTuple\nwhere\n A: ObserversTuple,\n B: ObserversTuple,\n DOT: DifferentialObserversTuple,\n S: UsesInput,\n{\n fn pre_exec_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_all(state, input)\n }\n\n fn post_exec_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) 
-> Result<(), Error> {\n self.differential.post_exec_all(state, input, exit_kind)\n }\n\n fn pre_exec_child_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_child_all(state, input)\n }\n\n fn post_exec_child_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential\n .post_exec_child_all(state, input, exit_kind)\n }\n\n /// Returns true if a `stdout` observer was added to the list\n #[inline]\n fn observes_stdout(&self) -> bool {\n self.primary.as_ref().observes_stdout() || self.secondary.as_ref().observes_stdout()\n }\n /// Returns true if a `stderr` observer was added to the list\n #[inline]\n fn observes_stderr(&self) -> bool {\n self.primary.as_ref().observes_stderr() || self.secondary.as_ref().observes_stderr()\n }\n\n /// Runs `observe_stdout` for all stdout observers in the list\n fn observe_stdout(&mut self, stdout: &[u8]) {\n self.primary.as_mut().observe_stderr(stdout);\n self.secondary.as_mut().observe_stderr(stdout);\n }\n\n /// Runs `observe_stderr` for all stderr observers in the list\n fn observe_stderr(&mut self, stderr: &[u8]) {\n self.primary.as_mut().observe_stderr(stderr);\n self.secondary.as_mut().observe_stderr(stderr);\n }\n}\n\nimpl MatchName for ProxyObserversTuple\nwhere\n A: MatchName,\n B: MatchName,\n DOT: MatchName,\n{\n fn match_name(&self, name: &str) -> Option<&T> {\n if let Some(t) = self.primary.as_ref().match_name::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_ref().match_name::(name) {\n Some(t)\n } else {\n self.differential.match_name::(name)\n }\n }\n fn match_name_mut(&mut self, name: &str) -> Option<&mut T> {\n if let Some(t) = self.primary.as_mut().match_name_mut::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_mut().match_name_mut::(name) {\n Some(t)\n } else {\n self.differential.match_name_mut::(name)\n }\n }\n}\n\nimpl ProxyObserversTuple {\n fn set(&mut self, primary: &A, secondary: &B) {\n self.primary = OwnedMutPtr::Ptr(ptr::from_ref(primary) as *mut A);\n self.secondary = OwnedMutPtr::Ptr(ptr::from_ref(secondary) as *mut B);\n }\n}\n\nimpl UsesObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n type Observers = ProxyObserversTuple;\n}\n\nimpl UsesState for DiffExecutor\nwhere\n A: UsesState,\n B: UsesState,\n{\n type State = A::State;\n}\n\nimpl HasObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n #[inline]\n fn observers(&self) -> &ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_ref().unwrap()\n }\n }\n\n #[inline]\n fn observers_mut(&mut self) -> &mut ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_mut().unwrap()\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl/src/executors/differential.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 13, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 0, "content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! 
In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\n", "meta": {"hash_id": "fd2b8aaa5316a99fe16981af3ff296754aef33bb48b58092ccc933117a0ad96c"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 1, "content": "impl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n", "meta": {"hash_id": "6656f04fb80fde607005ee204f75cbaf16dc2b205c50269d122b4d923e104fe1"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 2, "content": " /// Retrieve the primary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn primary(&mut self) -> &mut A {\n &mut self.primary\n }\n\n /// Retrieve the secondary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn secondary(&mut self) -> &mut B {\n &mut self.secondary\n }\n}\n\nimpl Executor for DiffExecutor\nwhere\n A: Executor + HasObservers,\n B: Executor + HasObservers,\n EM: UsesState,\n DOT: DifferentialObserversTuple,\n Z: UsesState,\n{\n fn run_target(\n &mut self,\n fuzzer: &mut Z,\n state: &mut Self::State,\n mgr: &mut EM,\n input: &Self::Input,\n ) -> Result {\n self.observers(); // update in advance\n let observers = self.observers.get_mut();\n observers\n .differential\n", "meta": {"hash_id": "2aec47965f03eb65b6c179dc5a4b86b42dde0933f32e694bb69eff27f74d2012"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 3, "content": " .pre_observe_first_all(observers.primary.as_mut())?;\n observers.primary.as_mut().pre_exec_all(state, input)?;\n let ret1 = self.primary.run_target(fuzzer, state, mgr, input)?;\n observers\n .primary\n .as_mut()\n .post_exec_all(state, input, &ret1)?;\n observers\n .differential\n .post_observe_first_all(observers.primary.as_mut())?;\n observers\n .differential\n", "meta": {"hash_id": "8212fd7e64e3f5c0a505c3d706b8093d3f34ba64a6307dc26f346e5bdd5b083d"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 4, "content": " .pre_observe_second_all(observers.secondary.as_mut())?;\n observers.secondary.as_mut().pre_exec_all(state, input)?;\n let ret2 = self.secondary.run_target(fuzzer, state, mgr, input)?;\n observers\n .secondary\n .as_mut()\n .post_exec_all(state, input, &ret2)?;\n observers\n .differential\n .post_observe_second_all(observers.secondary.as_mut())?;\n if ret1 == ret2 {\n Ok(ret1)\n } else {\n // We found a diff in the exit codes!\n Ok(ExitKind::Diff {\n primary: ret1.into(),\n secondary: 
ret2.into(),\n })\n }\n }\n}\n\n", "meta": {"hash_id": "7ff948f6e42f2b1f05ad1634d86d17ea3bfc04f4cf939685b8ea88d5d4fe8a68"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 5, "content": "/// Proxy the observers of the inner executors\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(\n bound = \"A: serde::Serialize + serde::de::DeserializeOwned, B: serde::Serialize + serde::de::DeserializeOwned, DOT: serde::Serialize + serde::de::DeserializeOwned\"\n)]\npub struct ProxyObserversTuple {\n primary: OwnedMutPtr,\n secondary: OwnedMutPtr,\n differential: DOT,\n}\n\nimpl ObserversTuple for ProxyObserversTuple\nwhere\n A: ObserversTuple,\n B: ObserversTuple,\n DOT: DifferentialObserversTuple,\n S: UsesInput,\n{\n fn pre_exec_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_all(state, input)\n }\n\n", "meta": {"hash_id": "ac03d6654206423c89768b824b6f78ba6682d00a1d73a763884327916380f90d"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 6, "content": " fn post_exec_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential.post_exec_all(state, input, exit_kind)\n }\n\n fn pre_exec_child_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_child_all(state, input)\n }\n\n fn post_exec_child_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential\n .post_exec_child_all(state, input, exit_kind)\n }\n\n", "meta": {"hash_id": "2da2540d0bd2f407fb961c5a89990ffb18fa998a45a375a72a2e6039f04f23ae"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 7, "content": " /// Returns true if a `stdout` observer was added to the list\n #[inline]\n fn observes_stdout(&self) -> bool {\n self.primary.as_ref().observes_stdout() || self.secondary.as_ref().observes_stdout()\n }\n /// Returns true if a `stderr` observer was added to the list\n #[inline]\n fn observes_stderr(&self) -> bool {\n self.primary.as_ref().observes_stderr() || self.secondary.as_ref().observes_stderr()\n }\n\n", "meta": {"hash_id": "a4b2561ef372396188c9070513f9b025537bee1c2280b9c45801c89e2b790e88"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 8, "content": " /// Runs `observe_stdout` for all stdout observers in the list\n fn observe_stdout(&mut self, stdout: &[u8]) {\n self.primary.as_mut().observe_stderr(stdout);\n self.secondary.as_mut().observe_stderr(stdout);\n }\n\n /// Runs `observe_stderr` for all stderr observers in the list\n fn observe_stderr(&mut self, stderr: &[u8]) {\n self.primary.as_mut().observe_stderr(stderr);\n self.secondary.as_mut().observe_stderr(stderr);\n }\n}\n\n", "meta": {"hash_id": "1f665ca6497314a56e81d1f0f2062767653416a44917b60abe8b47d753e25c0e"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 9, "content": "impl MatchName for ProxyObserversTuple\nwhere\n A: MatchName,\n B: MatchName,\n DOT: MatchName,\n{\n fn match_name(&self, name: &str) -> Option<&T> {\n if let Some(t) = self.primary.as_ref().match_name::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_ref().match_name::(name) {\n Some(t)\n } else {\n self.differential.match_name::(name)\n }\n }\n fn match_name_mut(&mut self, name: &str) -> Option<&mut T> {\n if let Some(t) = 
self.primary.as_mut().match_name_mut::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_mut().match_name_mut::(name) {\n Some(t)\n } else {\n self.differential.match_name_mut::(name)\n }\n }\n}\n\n", "meta": {"hash_id": "7e7cf8b506b40453c8145820334097fff6588755c65e8f6e2f91a2ee1945c8c4"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 10, "content": "impl ProxyObserversTuple {\n fn set(&mut self, primary: &A, secondary: &B) {\n self.primary = OwnedMutPtr::Ptr(ptr::from_ref(primary) as *mut A);\n self.secondary = OwnedMutPtr::Ptr(ptr::from_ref(secondary) as *mut B);\n }\n}\n\nimpl UsesObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n type Observers = ProxyObserversTuple;\n}\n\nimpl UsesState for DiffExecutor\nwhere\n A: UsesState,\n B: UsesState,\n{\n type State = A::State;\n}\n\n", "meta": {"hash_id": "550516fe92d909ddd8852d575d6835068ef6eadd0dab63c8d7a12aac05358fac"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 11, "content": "impl HasObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n #[inline]\n fn observers(&self) -> &ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_ref().unwrap()\n }\n }\n\n", "meta": {"hash_id": "345139116b6c6398c3d537585e6c6ab953689ed797ac29ea08da319b04a1ca7c"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 12, "content": " #[inline]\n fn observers_mut(&mut self) -> &mut ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_mut().unwrap()\n }\n }\n}\n", "meta": {"hash_id": "a9628ccfa791966c7723bd338d823b3f80b182bd340a569b66658c8969cd5c80"}}]}], "golden_chunks": [{"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 0, "content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! 
In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\n", "meta": {"hash_id": "fd2b8aaa5316a99fe16981af3ff296754aef33bb48b58092ccc933117a0ad96c"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl/src/executors/differential.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 13, "source": "codebases/jsonl"}} +{"query": "How do you create a new DiffExecutor instance?", "answer": "A new DiffExecutor instance is created using the `new` method:\n```rust\npub fn new(primary: A, secondary: B, observers: DOT) -> Self\n```\nIt takes the primary executor, secondary executor, and a differential observers tuple as arguments.", "golden_doc_uuids": ["5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145"], "golden_chunk_uuids": [["5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", 1]], "golden_documents": [{"uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\nimpl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n /// Retrieve the primary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn primary(&mut self) -> &mut A {\n &mut self.primary\n }\n\n /// Retrieve the secondary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn secondary(&mut self) -> &mut B {\n &mut self.secondary\n }\n}\n\nimpl Executor for DiffExecutor\nwhere\n A: Executor + HasObservers,\n B: Executor + HasObservers,\n EM: UsesState,\n DOT: DifferentialObserversTuple,\n Z: UsesState,\n{\n fn run_target(\n &mut self,\n fuzzer: &mut Z,\n state: &mut Self::State,\n mgr: &mut EM,\n input: &Self::Input,\n ) -> Result {\n self.observers(); // update in advance\n let observers = self.observers.get_mut();\n 
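To make the constructor shape concrete, here is a self-contained sketch of the same three-argument pattern. `DiffPair` is a local stand-in type invented for illustration, not LibAFL's `DiffExecutor`; only the `new(primary, secondary, observers)` shape and the `primary()`/`secondary()` accessors are taken from the source quoted above.

```rust
// Local stand-in for the constructor pattern described above; this is NOT
// LibAFL's DiffExecutor, just a minimal analog with the same shape.
#[allow(dead_code)]
struct DiffPair<A, B, O> {
    primary: A,
    secondary: B,
    observers: O,
}

impl<A, B, O> DiffPair<A, B, O> {
    /// Wrap two executors and a (differential) observers tuple.
    fn new(primary: A, secondary: B, observers: O) -> Self {
        Self { primary, secondary, observers }
    }
    /// Borrow the wrapped primary executor, as in the quoted source.
    fn primary(&mut self) -> &mut A {
        &mut self.primary
    }
    /// Borrow the wrapped secondary executor.
    fn secondary(&mut self) -> &mut B {
        &mut self.secondary
    }
}

fn main() {
    // Toy instantiation: strings model the two executors, `()` the observers.
    let mut pair = DiffPair::new("primary-exec", "secondary-exec", ());
    assert_eq!(*pair.primary(), "primary-exec");
    assert_eq!(*pair.secondary(), "secondary-exec");
    println!("constructed diff pair");
}
```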
observers\n .differential\n .pre_observe_first_all(observers.primary.as_mut())?;\n observers.primary.as_mut().pre_exec_all(state, input)?;\n let ret1 = self.primary.run_target(fuzzer, state, mgr, input)?;\n observers\n .primary\n .as_mut()\n .post_exec_all(state, input, &ret1)?;\n observers\n .differential\n .post_observe_first_all(observers.primary.as_mut())?;\n observers\n .differential\n .pre_observe_second_all(observers.secondary.as_mut())?;\n observers.secondary.as_mut().pre_exec_all(state, input)?;\n let ret2 = self.secondary.run_target(fuzzer, state, mgr, input)?;\n observers\n .secondary\n .as_mut()\n .post_exec_all(state, input, &ret2)?;\n observers\n .differential\n .post_observe_second_all(observers.secondary.as_mut())?;\n if ret1 == ret2 {\n Ok(ret1)\n } else {\n // We found a diff in the exit codes!\n Ok(ExitKind::Diff {\n primary: ret1.into(),\n secondary: ret2.into(),\n })\n }\n }\n}\n\n/// Proxy the observers of the inner executors\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(\n bound = \"A: serde::Serialize + serde::de::DeserializeOwned, B: serde::Serialize + serde::de::DeserializeOwned, DOT: serde::Serialize + serde::de::DeserializeOwned\"\n)]\npub struct ProxyObserversTuple {\n primary: OwnedMutPtr,\n secondary: OwnedMutPtr,\n differential: DOT,\n}\n\nimpl ObserversTuple for ProxyObserversTuple\nwhere\n A: ObserversTuple,\n B: ObserversTuple,\n DOT: DifferentialObserversTuple,\n S: UsesInput,\n{\n fn pre_exec_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_all(state, input)\n }\n\n fn post_exec_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential.post_exec_all(state, input, exit_kind)\n }\n\n fn pre_exec_child_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_child_all(state, input)\n }\n\n fn post_exec_child_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential\n .post_exec_child_all(state, input, exit_kind)\n }\n\n /// Returns true if a `stdout` observer was added to the list\n #[inline]\n fn observes_stdout(&self) -> bool {\n self.primary.as_ref().observes_stdout() || self.secondary.as_ref().observes_stdout()\n }\n /// Returns true if a `stderr` observer was added to the list\n #[inline]\n fn observes_stderr(&self) -> bool {\n self.primary.as_ref().observes_stderr() || self.secondary.as_ref().observes_stderr()\n }\n\n /// Runs `observe_stdout` for all stdout observers in the list\n fn observe_stdout(&mut self, stdout: &[u8]) {\n self.primary.as_mut().observe_stderr(stdout);\n self.secondary.as_mut().observe_stderr(stdout);\n }\n\n /// Runs `observe_stderr` for all stderr observers in the list\n fn observe_stderr(&mut self, stderr: &[u8]) {\n self.primary.as_mut().observe_stderr(stderr);\n self.secondary.as_mut().observe_stderr(stderr);\n }\n}\n\nimpl MatchName for ProxyObserversTuple\nwhere\n A: MatchName,\n B: MatchName,\n DOT: MatchName,\n{\n fn match_name(&self, name: &str) -> Option<&T> {\n if let Some(t) = self.primary.as_ref().match_name::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_ref().match_name::(name) {\n Some(t)\n } else {\n self.differential.match_name::(name)\n }\n }\n fn match_name_mut(&mut self, name: &str) -> Option<&mut T> {\n if let Some(t) = self.primary.as_mut().match_name_mut::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_mut().match_name_mut::(name) {\n 
Some(t)\n } else {\n self.differential.match_name_mut::(name)\n }\n }\n}\n\nimpl ProxyObserversTuple {\n fn set(&mut self, primary: &A, secondary: &B) {\n self.primary = OwnedMutPtr::Ptr(ptr::from_ref(primary) as *mut A);\n self.secondary = OwnedMutPtr::Ptr(ptr::from_ref(secondary) as *mut B);\n }\n}\n\nimpl UsesObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n type Observers = ProxyObserversTuple;\n}\n\nimpl UsesState for DiffExecutor\nwhere\n A: UsesState,\n B: UsesState,\n{\n type State = A::State;\n}\n\nimpl HasObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n #[inline]\n fn observers(&self) -> &ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_ref().unwrap()\n }\n }\n\n #[inline]\n fn observers_mut(&mut self) -> &mut ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_mut().unwrap()\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl/src/executors/differential.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 13, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 0, "content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\n", "meta": {"hash_id": "fd2b8aaa5316a99fe16981af3ff296754aef33bb48b58092ccc933117a0ad96c"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 1, "content": "impl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n", "meta": {"hash_id": "6656f04fb80fde607005ee204f75cbaf16dc2b205c50269d122b4d923e104fe1"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 2, "content": " /// Retrieve the primary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn primary(&mut self) -> &mut A {\n &mut self.primary\n }\n\n /// Retrieve the secondary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn secondary(&mut self) -> &mut B {\n &mut 
self.secondary\n }\n}\n\nimpl Executor for DiffExecutor\nwhere\n A: Executor + HasObservers,\n B: Executor + HasObservers,\n EM: UsesState,\n DOT: DifferentialObserversTuple,\n Z: UsesState,\n{\n fn run_target(\n &mut self,\n fuzzer: &mut Z,\n state: &mut Self::State,\n mgr: &mut EM,\n input: &Self::Input,\n ) -> Result {\n self.observers(); // update in advance\n let observers = self.observers.get_mut();\n observers\n .differential\n", "meta": {"hash_id": "2aec47965f03eb65b6c179dc5a4b86b42dde0933f32e694bb69eff27f74d2012"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 3, "content": " .pre_observe_first_all(observers.primary.as_mut())?;\n observers.primary.as_mut().pre_exec_all(state, input)?;\n let ret1 = self.primary.run_target(fuzzer, state, mgr, input)?;\n observers\n .primary\n .as_mut()\n .post_exec_all(state, input, &ret1)?;\n observers\n .differential\n .post_observe_first_all(observers.primary.as_mut())?;\n observers\n .differential\n", "meta": {"hash_id": "8212fd7e64e3f5c0a505c3d706b8093d3f34ba64a6307dc26f346e5bdd5b083d"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 4, "content": " .pre_observe_second_all(observers.secondary.as_mut())?;\n observers.secondary.as_mut().pre_exec_all(state, input)?;\n let ret2 = self.secondary.run_target(fuzzer, state, mgr, input)?;\n observers\n .secondary\n .as_mut()\n .post_exec_all(state, input, &ret2)?;\n observers\n .differential\n .post_observe_second_all(observers.secondary.as_mut())?;\n if ret1 == ret2 {\n Ok(ret1)\n } else {\n // We found a diff in the exit codes!\n Ok(ExitKind::Diff {\n primary: ret1.into(),\n secondary: ret2.into(),\n })\n }\n }\n}\n\n", "meta": {"hash_id": "7ff948f6e42f2b1f05ad1634d86d17ea3bfc04f4cf939685b8ea88d5d4fe8a68"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 5, "content": "/// Proxy the observers of the inner executors\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(\n bound = \"A: serde::Serialize + serde::de::DeserializeOwned, B: serde::Serialize + serde::de::DeserializeOwned, DOT: serde::Serialize + serde::de::DeserializeOwned\"\n)]\npub struct ProxyObserversTuple {\n primary: OwnedMutPtr,\n secondary: OwnedMutPtr,\n differential: DOT,\n}\n\nimpl ObserversTuple for ProxyObserversTuple\nwhere\n A: ObserversTuple,\n B: ObserversTuple,\n DOT: DifferentialObserversTuple,\n S: UsesInput,\n{\n fn pre_exec_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_all(state, input)\n }\n\n", "meta": {"hash_id": "ac03d6654206423c89768b824b6f78ba6682d00a1d73a763884327916380f90d"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 6, "content": " fn post_exec_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential.post_exec_all(state, input, exit_kind)\n }\n\n fn pre_exec_child_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_child_all(state, input)\n }\n\n fn post_exec_child_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential\n .post_exec_child_all(state, input, exit_kind)\n }\n\n", "meta": {"hash_id": "2da2540d0bd2f407fb961c5a89990ffb18fa998a45a375a72a2e6039f04f23ae"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 7, "content": " /// Returns true if a `stdout` 
observer was added to the list\n #[inline]\n fn observes_stdout(&self) -> bool {\n self.primary.as_ref().observes_stdout() || self.secondary.as_ref().observes_stdout()\n }\n /// Returns true if a `stderr` observer was added to the list\n #[inline]\n fn observes_stderr(&self) -> bool {\n self.primary.as_ref().observes_stderr() || self.secondary.as_ref().observes_stderr()\n }\n\n", "meta": {"hash_id": "a4b2561ef372396188c9070513f9b025537bee1c2280b9c45801c89e2b790e88"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 8, "content": " /// Runs `observe_stdout` for all stdout observers in the list\n fn observe_stdout(&mut self, stdout: &[u8]) {\n self.primary.as_mut().observe_stderr(stdout);\n self.secondary.as_mut().observe_stderr(stdout);\n }\n\n /// Runs `observe_stderr` for all stderr observers in the list\n fn observe_stderr(&mut self, stderr: &[u8]) {\n self.primary.as_mut().observe_stderr(stderr);\n self.secondary.as_mut().observe_stderr(stderr);\n }\n}\n\n", "meta": {"hash_id": "1f665ca6497314a56e81d1f0f2062767653416a44917b60abe8b47d753e25c0e"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 9, "content": "impl MatchName for ProxyObserversTuple\nwhere\n A: MatchName,\n B: MatchName,\n DOT: MatchName,\n{\n fn match_name(&self, name: &str) -> Option<&T> {\n if let Some(t) = self.primary.as_ref().match_name::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_ref().match_name::(name) {\n Some(t)\n } else {\n self.differential.match_name::(name)\n }\n }\n fn match_name_mut(&mut self, name: &str) -> Option<&mut T> {\n if let Some(t) = self.primary.as_mut().match_name_mut::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_mut().match_name_mut::(name) {\n Some(t)\n } else {\n self.differential.match_name_mut::(name)\n }\n }\n}\n\n", "meta": {"hash_id": "7e7cf8b506b40453c8145820334097fff6588755c65e8f6e2f91a2ee1945c8c4"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 10, "content": "impl ProxyObserversTuple {\n fn set(&mut self, primary: &A, secondary: &B) {\n self.primary = OwnedMutPtr::Ptr(ptr::from_ref(primary) as *mut A);\n self.secondary = OwnedMutPtr::Ptr(ptr::from_ref(secondary) as *mut B);\n }\n}\n\nimpl UsesObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n type Observers = ProxyObserversTuple;\n}\n\nimpl UsesState for DiffExecutor\nwhere\n A: UsesState,\n B: UsesState,\n{\n type State = A::State;\n}\n\n", "meta": {"hash_id": "550516fe92d909ddd8852d575d6835068ef6eadd0dab63c8d7a12aac05358fac"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 11, "content": "impl HasObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n #[inline]\n fn observers(&self) -> &ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_ref().unwrap()\n }\n }\n\n", "meta": {"hash_id": "345139116b6c6398c3d537585e6c6ab953689ed797ac29ea08da319b04a1ca7c"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 12, "content": " #[inline]\n fn observers_mut(&mut self) -> &mut ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n 
.set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_mut().unwrap()\n }\n }\n}\n", "meta": {"hash_id": "a9628ccfa791966c7723bd338d823b3f80b182bd340a569b66658c8969cd5c80"}}]}], "golden_chunks": [{"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 1, "content": "impl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n", "meta": {"hash_id": "6656f04fb80fde607005ee204f75cbaf16dc2b205c50269d122b4d923e104fe1"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl/src/executors/differential.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 13, "source": "codebases/jsonl"}} +{"query": "What happens in the `run_target` method of the DiffExecutor?", "answer": "The `run_target` method does the following:\n1. Updates the observers by calling `self.observers()`.\n2. Calls `pre_observe_first_all` on the differential observers for the primary executor.\n3. Calls `pre_exec_all` on the primary observers.\n4. Runs the primary executor's `run_target` method.\n5. Calls `post_exec_all` on the primary observers.\n6. Calls `post_observe_first_all` on the differential observers for the primary executor.\n7. Calls `pre_observe_second_all` on the differential observers for the secondary executor.\n8. Calls `pre_exec_all` on the secondary observers.\n9. Runs the secondary executor's `run_target` method.\n10. Calls `post_exec_all` on the secondary observers.\n11. Calls `post_observe_second_all` on the differential observers for the secondary executor.\n12. Compares the exit kinds of the primary and secondary executors. If they are the same, returns the exit kind. If they are different, returns an `ExitKind::Diff` containing both exit kinds.", "golden_doc_uuids": ["5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145"], "golden_chunk_uuids": [["5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", 2], ["5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", 1], ["5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", 0]], "golden_documents": [{"uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! 
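Step 12 is the differential core of the method. The toy model below isolates just that comparison; the `ExitKind` enum here is a simplified local stand-in (LibAFL's real `ExitKind::Diff` carries `DiffExitKind` payloads, reduced to plain bytes for this sketch):

```rust
// Simplified local stand-in for LibAFL's exit-kind comparison (step 12):
// equal results pass through, a mismatch becomes a dedicated Diff variant.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ExitKind {
    Ok,
    Crash,
    Diff { primary: u8, secondary: u8 }, // payload simplified to bytes
}

fn combine(ret1: ExitKind, ret2: ExitKind) -> ExitKind {
    if ret1 == ret2 {
        ret1
    } else {
        // We found a diff in the exit codes!
        ExitKind::Diff { primary: 0, secondary: 1 } // toy encoding only
    }
}

fn main() {
    assert_eq!(combine(ExitKind::Ok, ExitKind::Ok), ExitKind::Ok);
    assert!(matches!(
        combine(ExitKind::Ok, ExitKind::Crash),
        ExitKind::Diff { .. }
    ));
    println!("diff semantics ok");
}
```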
In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\nimpl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n /// Retrieve the primary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn primary(&mut self) -> &mut A {\n &mut self.primary\n }\n\n /// Retrieve the secondary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn secondary(&mut self) -> &mut B {\n &mut self.secondary\n }\n}\n\nimpl Executor for DiffExecutor\nwhere\n A: Executor + HasObservers,\n B: Executor + HasObservers,\n EM: UsesState,\n DOT: DifferentialObserversTuple,\n Z: UsesState,\n{\n fn run_target(\n &mut self,\n fuzzer: &mut Z,\n state: &mut Self::State,\n mgr: &mut EM,\n input: &Self::Input,\n ) -> Result {\n self.observers(); // update in advance\n let observers = self.observers.get_mut();\n observers\n .differential\n .pre_observe_first_all(observers.primary.as_mut())?;\n observers.primary.as_mut().pre_exec_all(state, input)?;\n let ret1 = self.primary.run_target(fuzzer, state, mgr, input)?;\n observers\n .primary\n .as_mut()\n .post_exec_all(state, input, &ret1)?;\n observers\n .differential\n .post_observe_first_all(observers.primary.as_mut())?;\n observers\n .differential\n .pre_observe_second_all(observers.secondary.as_mut())?;\n observers.secondary.as_mut().pre_exec_all(state, input)?;\n let ret2 = self.secondary.run_target(fuzzer, state, mgr, input)?;\n observers\n .secondary\n .as_mut()\n .post_exec_all(state, input, &ret2)?;\n observers\n .differential\n .post_observe_second_all(observers.secondary.as_mut())?;\n if ret1 == ret2 {\n Ok(ret1)\n } else {\n // We found a diff in the exit codes!\n Ok(ExitKind::Diff {\n primary: ret1.into(),\n secondary: ret2.into(),\n })\n }\n }\n}\n\n/// Proxy the observers of the inner executors\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(\n bound = \"A: serde::Serialize + serde::de::DeserializeOwned, B: serde::Serialize + serde::de::DeserializeOwned, DOT: serde::Serialize + serde::de::DeserializeOwned\"\n)]\npub struct ProxyObserversTuple {\n primary: OwnedMutPtr,\n secondary: OwnedMutPtr,\n differential: DOT,\n}\n\nimpl ObserversTuple for ProxyObserversTuple\nwhere\n A: ObserversTuple,\n B: ObserversTuple,\n DOT: DifferentialObserversTuple,\n S: UsesInput,\n{\n fn pre_exec_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_all(state, input)\n }\n\n fn post_exec_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) 
-> Result<(), Error> {\n self.differential.post_exec_all(state, input, exit_kind)\n }\n\n fn pre_exec_child_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_child_all(state, input)\n }\n\n fn post_exec_child_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential\n .post_exec_child_all(state, input, exit_kind)\n }\n\n /// Returns true if a `stdout` observer was added to the list\n #[inline]\n fn observes_stdout(&self) -> bool {\n self.primary.as_ref().observes_stdout() || self.secondary.as_ref().observes_stdout()\n }\n /// Returns true if a `stderr` observer was added to the list\n #[inline]\n fn observes_stderr(&self) -> bool {\n self.primary.as_ref().observes_stderr() || self.secondary.as_ref().observes_stderr()\n }\n\n /// Runs `observe_stdout` for all stdout observers in the list\n fn observe_stdout(&mut self, stdout: &[u8]) {\n self.primary.as_mut().observe_stderr(stdout);\n self.secondary.as_mut().observe_stderr(stdout);\n }\n\n /// Runs `observe_stderr` for all stderr observers in the list\n fn observe_stderr(&mut self, stderr: &[u8]) {\n self.primary.as_mut().observe_stderr(stderr);\n self.secondary.as_mut().observe_stderr(stderr);\n }\n}\n\nimpl MatchName for ProxyObserversTuple\nwhere\n A: MatchName,\n B: MatchName,\n DOT: MatchName,\n{\n fn match_name(&self, name: &str) -> Option<&T> {\n if let Some(t) = self.primary.as_ref().match_name::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_ref().match_name::(name) {\n Some(t)\n } else {\n self.differential.match_name::(name)\n }\n }\n fn match_name_mut(&mut self, name: &str) -> Option<&mut T> {\n if let Some(t) = self.primary.as_mut().match_name_mut::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_mut().match_name_mut::(name) {\n Some(t)\n } else {\n self.differential.match_name_mut::(name)\n }\n }\n}\n\nimpl ProxyObserversTuple {\n fn set(&mut self, primary: &A, secondary: &B) {\n self.primary = OwnedMutPtr::Ptr(ptr::from_ref(primary) as *mut A);\n self.secondary = OwnedMutPtr::Ptr(ptr::from_ref(secondary) as *mut B);\n }\n}\n\nimpl UsesObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n type Observers = ProxyObserversTuple;\n}\n\nimpl UsesState for DiffExecutor\nwhere\n A: UsesState,\n B: UsesState,\n{\n type State = A::State;\n}\n\nimpl HasObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n #[inline]\n fn observers(&self) -> &ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_ref().unwrap()\n }\n }\n\n #[inline]\n fn observers_mut(&mut self) -> &mut ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_mut().unwrap()\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl/src/executors/differential.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 13, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 0, "content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! 
In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\n", "meta": {"hash_id": "fd2b8aaa5316a99fe16981af3ff296754aef33bb48b58092ccc933117a0ad96c"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 1, "content": "impl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n", "meta": {"hash_id": "6656f04fb80fde607005ee204f75cbaf16dc2b205c50269d122b4d923e104fe1"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 2, "content": " /// Retrieve the primary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn primary(&mut self) -> &mut A {\n &mut self.primary\n }\n\n /// Retrieve the secondary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn secondary(&mut self) -> &mut B {\n &mut self.secondary\n }\n}\n\nimpl Executor for DiffExecutor\nwhere\n A: Executor + HasObservers,\n B: Executor + HasObservers,\n EM: UsesState,\n DOT: DifferentialObserversTuple,\n Z: UsesState,\n{\n fn run_target(\n &mut self,\n fuzzer: &mut Z,\n state: &mut Self::State,\n mgr: &mut EM,\n input: &Self::Input,\n ) -> Result {\n self.observers(); // update in advance\n let observers = self.observers.get_mut();\n observers\n .differential\n", "meta": {"hash_id": "2aec47965f03eb65b6c179dc5a4b86b42dde0933f32e694bb69eff27f74d2012"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 3, "content": " .pre_observe_first_all(observers.primary.as_mut())?;\n observers.primary.as_mut().pre_exec_all(state, input)?;\n let ret1 = self.primary.run_target(fuzzer, state, mgr, input)?;\n observers\n .primary\n .as_mut()\n .post_exec_all(state, input, &ret1)?;\n observers\n .differential\n .post_observe_first_all(observers.primary.as_mut())?;\n observers\n .differential\n", "meta": {"hash_id": "8212fd7e64e3f5c0a505c3d706b8093d3f34ba64a6307dc26f346e5bdd5b083d"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 4, "content": " .pre_observe_second_all(observers.secondary.as_mut())?;\n observers.secondary.as_mut().pre_exec_all(state, input)?;\n let ret2 = self.secondary.run_target(fuzzer, state, mgr, input)?;\n observers\n .secondary\n .as_mut()\n .post_exec_all(state, input, &ret2)?;\n observers\n .differential\n .post_observe_second_all(observers.secondary.as_mut())?;\n if ret1 == ret2 {\n Ok(ret1)\n } else {\n // We found a diff in the exit codes!\n Ok(ExitKind::Diff {\n primary: ret1.into(),\n secondary: 
ret2.into(),\n })\n }\n }\n}\n\n", "meta": {"hash_id": "7ff948f6e42f2b1f05ad1634d86d17ea3bfc04f4cf939685b8ea88d5d4fe8a68"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 5, "content": "/// Proxy the observers of the inner executors\n#[derive(Serialize, Deserialize, Debug)]\n#[serde(\n bound = \"A: serde::Serialize + serde::de::DeserializeOwned, B: serde::Serialize + serde::de::DeserializeOwned, DOT: serde::Serialize + serde::de::DeserializeOwned\"\n)]\npub struct ProxyObserversTuple {\n primary: OwnedMutPtr,\n secondary: OwnedMutPtr,\n differential: DOT,\n}\n\nimpl ObserversTuple for ProxyObserversTuple\nwhere\n A: ObserversTuple,\n B: ObserversTuple,\n DOT: DifferentialObserversTuple,\n S: UsesInput,\n{\n fn pre_exec_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_all(state, input)\n }\n\n", "meta": {"hash_id": "ac03d6654206423c89768b824b6f78ba6682d00a1d73a763884327916380f90d"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 6, "content": " fn post_exec_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential.post_exec_all(state, input, exit_kind)\n }\n\n fn pre_exec_child_all(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.differential.pre_exec_child_all(state, input)\n }\n\n fn post_exec_child_all(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.differential\n .post_exec_child_all(state, input, exit_kind)\n }\n\n", "meta": {"hash_id": "2da2540d0bd2f407fb961c5a89990ffb18fa998a45a375a72a2e6039f04f23ae"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 7, "content": " /// Returns true if a `stdout` observer was added to the list\n #[inline]\n fn observes_stdout(&self) -> bool {\n self.primary.as_ref().observes_stdout() || self.secondary.as_ref().observes_stdout()\n }\n /// Returns true if a `stderr` observer was added to the list\n #[inline]\n fn observes_stderr(&self) -> bool {\n self.primary.as_ref().observes_stderr() || self.secondary.as_ref().observes_stderr()\n }\n\n", "meta": {"hash_id": "a4b2561ef372396188c9070513f9b025537bee1c2280b9c45801c89e2b790e88"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 8, "content": " /// Runs `observe_stdout` for all stdout observers in the list\n fn observe_stdout(&mut self, stdout: &[u8]) {\n self.primary.as_mut().observe_stderr(stdout);\n self.secondary.as_mut().observe_stderr(stdout);\n }\n\n /// Runs `observe_stderr` for all stderr observers in the list\n fn observe_stderr(&mut self, stderr: &[u8]) {\n self.primary.as_mut().observe_stderr(stderr);\n self.secondary.as_mut().observe_stderr(stderr);\n }\n}\n\n", "meta": {"hash_id": "1f665ca6497314a56e81d1f0f2062767653416a44917b60abe8b47d753e25c0e"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 9, "content": "impl MatchName for ProxyObserversTuple\nwhere\n A: MatchName,\n B: MatchName,\n DOT: MatchName,\n{\n fn match_name(&self, name: &str) -> Option<&T> {\n if let Some(t) = self.primary.as_ref().match_name::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_ref().match_name::(name) {\n Some(t)\n } else {\n self.differential.match_name::(name)\n }\n }\n fn match_name_mut(&mut self, name: &str) -> Option<&mut T> {\n if let Some(t) = 
self.primary.as_mut().match_name_mut::(name) {\n Some(t)\n } else if let Some(t) = self.secondary.as_mut().match_name_mut::(name) {\n Some(t)\n } else {\n self.differential.match_name_mut::(name)\n }\n }\n}\n\n", "meta": {"hash_id": "7e7cf8b506b40453c8145820334097fff6588755c65e8f6e2f91a2ee1945c8c4"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 10, "content": "impl ProxyObserversTuple {\n fn set(&mut self, primary: &A, secondary: &B) {\n self.primary = OwnedMutPtr::Ptr(ptr::from_ref(primary) as *mut A);\n self.secondary = OwnedMutPtr::Ptr(ptr::from_ref(secondary) as *mut B);\n }\n}\n\nimpl UsesObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n type Observers = ProxyObserversTuple;\n}\n\nimpl UsesState for DiffExecutor\nwhere\n A: UsesState,\n B: UsesState,\n{\n type State = A::State;\n}\n\n", "meta": {"hash_id": "550516fe92d909ddd8852d575d6835068ef6eadd0dab63c8d7a12aac05358fac"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 11, "content": "impl HasObservers for DiffExecutor\nwhere\n A: HasObservers,\n B: HasObservers,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n DOT: DifferentialObserversTuple,\n{\n #[inline]\n fn observers(&self) -> &ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_ref().unwrap()\n }\n }\n\n", "meta": {"hash_id": "345139116b6c6398c3d537585e6c6ab953689ed797ac29ea08da319b04a1ca7c"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 12, "content": " #[inline]\n fn observers_mut(&mut self) -> &mut ProxyObserversTuple {\n unsafe {\n self.observers\n .get()\n .as_mut()\n .unwrap()\n .set(self.primary.observers(), self.secondary.observers());\n self.observers.get().as_mut().unwrap()\n }\n }\n}\n", "meta": {"hash_id": "a9628ccfa791966c7723bd338d823b3f80b182bd340a569b66658c8969cd5c80"}}]}], "golden_chunks": [{"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 2, "content": " /// Retrieve the primary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn primary(&mut self) -> &mut A {\n &mut self.primary\n }\n\n /// Retrieve the secondary `Executor` that is wrapped by this `DiffExecutor`.\n pub fn secondary(&mut self) -> &mut B {\n &mut self.secondary\n }\n}\n\nimpl Executor for DiffExecutor\nwhere\n A: Executor + HasObservers,\n B: Executor + HasObservers,\n EM: UsesState,\n DOT: DifferentialObserversTuple,\n Z: UsesState,\n{\n fn run_target(\n &mut self,\n fuzzer: &mut Z,\n state: &mut Self::State,\n mgr: &mut EM,\n input: &Self::Input,\n ) -> Result {\n self.observers(); // update in advance\n let observers = self.observers.get_mut();\n observers\n .differential\n", "meta": {"hash_id": "2aec47965f03eb65b6c179dc5a4b86b42dde0933f32e694bb69eff27f74d2012"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 1, "content": "impl DiffExecutor {\n /// Create a new `DiffExecutor`, wrapping the given `executor`s.\n pub fn new(primary: A, secondary: B, observers: DOT) -> Self\n where\n A: UsesState + HasObservers,\n B: UsesState + HasObservers,\n DOT: DifferentialObserversTuple,\n OTA: ObserversTuple,\n OTB: ObserversTuple,\n {\n Self {\n primary,\n secondary,\n observers: UnsafeCell::new(ProxyObserversTuple {\n primary: 
OwnedMutPtr::Ptr(ptr::null_mut()),\n secondary: OwnedMutPtr::Ptr(ptr::null_mut()),\n differential: observers,\n }),\n }\n }\n\n", "meta": {"hash_id": "6656f04fb80fde607005ee204f75cbaf16dc2b205c50269d122b4d923e104fe1"}}, {"doc_uuid": "5e4c01057a10732d34784af2a97bee9d173863f043b9901de8ef7f57bc590145", "index": 0, "content": "//! Executor for differential fuzzing.\n//! It wraps two executors that will be run after each other with the same input.\n//! In comparison to the [`crate::executors::CombinedExecutor`] it also runs the secondary executor in `run_target`.\n//!\nuse core::{cell::UnsafeCell, fmt::Debug, ptr};\n\nuse libafl_bolts::{ownedref::OwnedMutPtr, tuples::MatchName};\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n executors::{Executor, ExitKind, HasObservers},\n inputs::UsesInput,\n observers::{DifferentialObserversTuple, ObserversTuple, UsesObservers},\n state::UsesState,\n Error,\n};\n\n/// A [`DiffExecutor`] wraps a primary executor, forwarding its methods, and a secondary one\n#[derive(Debug)]\npub struct DiffExecutor {\n primary: A,\n secondary: B,\n observers: UnsafeCell>,\n}\n\n", "meta": {"hash_id": "fd2b8aaa5316a99fe16981af3ff296754aef33bb48b58092ccc933117a0ad96c"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl/src/executors/differential.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 13, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the SIGNALS and SIGNALS_PTR static variables?", "answer": "The SIGNALS and SIGNALS_PTR static variables are used as a coverage map to track which parts of the input have been exercised during fuzzing. SIGNALS is an array of 64 bytes initialized to 0, and SIGNALS_PTR is a mutable pointer to the SIGNALS array. The signals_set function is used to assign a signal to the signals map at a specific index.", "golden_doc_uuids": ["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c"], "golden_chunk_uuids": [["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", 1]], "golden_documents": [{"uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = 
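The pattern this answer describes can be reproduced without LibAFL. The sketch below mirrors the `SIGNALS`/`signals_set` idiom from the record above; it uses `addr_of_mut!` instead of a second pointer static so it stays warning-free on newer toolchains, and the `main` driver is an invented toy, not dataset code:

```rust
use std::ptr::{addr_of_mut, write};

// Fixed-size coverage map, written to manually because the fuzz target has
// no compile-time instrumentation (same role as SIGNALS above).
static mut SIGNALS: [u8; 64] = [0; 64];

/// Mark entry `idx` as covered, writing through a raw pointer like the
/// quoted `signals_set`.
fn signals_set(idx: usize) {
    // `addr_of_mut!` avoids taking a `&mut` to a `static mut`; the dataset's
    // code keeps the pointer in a separate SIGNALS_PTR static instead.
    let map = unsafe { addr_of_mut!(SIGNALS) } as *mut u8;
    unsafe { write(map.add(idx), 1) };
}

fn main() {
    // Hypothetical driver: pretend two comparison sites were reached.
    signals_set(0);
    signals_set(3);
    let map = unsafe { addr_of_mut!(SIGNALS) } as *const u8;
    let hits = (0..64).filter(|&i| unsafe { *map.add(i) } == 1).count();
    println!("covered entries: {hits}"); // prints 2
}
```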
b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 0, "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n 
corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n", "meta": {"hash_id": "aca7f4ac2377973a14467962bd97699e120ef964b371c2912cdecb28b54379ff"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 1, "content": "/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n", "meta": {"hash_id": "e4f88cdc05e704a83ed94baadbfd4561a507f822c66d32443f1f49af3c9efe50"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 2, "content": " #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n", "meta": {"hash_id": "a55e11a9bab56f4eef35c02c7bbaf17861e06991f7f57438ea03d513d83f1030"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 3, "content": " // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n", "meta": {"hash_id": "b5cc47b917b7b6a4d4026359abdc24f7533b907f2bd8da92a5ef94e9ffdce7e4"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 4, "content": " // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let 
mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n", "meta": {"hash_id": "63ec3bebbac7866631f9c53b1df7d980481d423bc13b348ddbaca707afe1d21c"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 5, "content": " // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n", "meta": {"hash_id": "65d6d5a4aef5ae950a53b1e76f8bf3c4def4aa8d0f8f724d6e763f7aa9af7506"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 6, "content": " // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": {"hash_id": "f3d5c873a1caaca47edcab9a0d23bbd5fc473f75766d701d14913494b6cfacda"}}]}], "golden_chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 1, "content": "/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n", "meta": {"hash_id": "e4f88cdc05e704a83ed94baadbfd4561a507f822c66d32443f1f49af3c9efe50"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "How does the harness closure work?", "answer": "The harness closure takes a BytesInput and compares its target_bytes() against a goal byte slice. It iterates over the input and goal bytes using zip and take_while, setting signals in the SIGNALS map using signals_set for each matching byte. If the entire goal is matched, it either panics (on Unix) or writes 0 to a null pointer (on Windows) to trigger an artificial bug. 
Finally, it returns ExitKind::Ok.", "golden_doc_uuids": ["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c"], "golden_chunk_uuids": [["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", 1]], "golden_documents": [{"uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = 
SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 0, "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n", "meta": {"hash_id": "aca7f4ac2377973a14467962bd97699e120ef964b371c2912cdecb28b54379ff"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 1, "content": "/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n", "meta": {"hash_id": "e4f88cdc05e704a83ed94baadbfd4561a507f822c66d32443f1f49af3c9efe50"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 2, "content": " #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 
0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n", "meta": {"hash_id": "a55e11a9bab56f4eef35c02c7bbaf17861e06991f7f57438ea03d513d83f1030"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 3, "content": " // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n", "meta": {"hash_id": "b5cc47b917b7b6a4d4026359abdc24f7533b907f2bd8da92a5ef94e9ffdce7e4"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 4, "content": " // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n", "meta": {"hash_id": "63ec3bebbac7866631f9c53b1df7d980481d423bc13b348ddbaca707afe1d21c"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 5, "content": " // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n", "meta": {"hash_id": "65d6d5a4aef5ae950a53b1e76f8bf3c4def4aa8d0f8f724d6e763f7aa9af7506"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 6, "content": " // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": {"hash_id": 
"f3d5c873a1caaca47edcab9a0d23bbd5fc473f75766d701d14913494b6cfacda"}}]}], "golden_chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 1, "content": "/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n", "meta": {"hash_id": "e4f88cdc05e704a83ed94baadbfd4561a507f822c66d32443f1f49af3c9efe50"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the StdMapObserver?", "answer": "The StdMapObserver is used to create an observation channel using the SIGNALS map. It is created using unsafe code to get a mutable pointer to the SIGNALS array and its length. This observer is used to track coverage during fuzzing.", "golden_doc_uuids": ["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c"], "golden_chunk_uuids": [["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", 2]], "golden_documents": [{"uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut 
u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 0, "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, 
StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n", "meta": {"hash_id": "aca7f4ac2377973a14467962bd97699e120ef964b371c2912cdecb28b54379ff"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 1, "content": "/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n", "meta": {"hash_id": "e4f88cdc05e704a83ed94baadbfd4561a507f822c66d32443f1f49af3c9efe50"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 2, "content": " #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n", "meta": {"hash_id": "a55e11a9bab56f4eef35c02c7bbaf17861e06991f7f57438ea03d513d83f1030"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 3, "content": " // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n", "meta": {"hash_id": "b5cc47b917b7b6a4d4026359abdc24f7533b907f2bd8da92a5ef94e9ffdce7e4"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 4, "content": " // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler 
= QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n", "meta": {"hash_id": "63ec3bebbac7866631f9c53b1df7d980481d423bc13b348ddbaca707afe1d21c"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 5, "content": " // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n", "meta": {"hash_id": "65d6d5a4aef5ae950a53b1e76f8bf3c4def4aa8d0f8f724d6e763f7aa9af7506"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 6, "content": " // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": {"hash_id": "f3d5c873a1caaca47edcab9a0d23bbd5fc473f75766d701d14913494b6cfacda"}}]}], "golden_chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 2, "content": " #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n", "meta": {"hash_id": "a55e11a9bab56f4eef35c02c7bbaf17861e06991f7f57438ea03d513d83f1030"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "What feedbacks are used in this fuzzer?", "answer": "The fuzzer uses two feedbacks: MaxMapFeedback and CrashFeedback. MaxMapFeedback is used to rate the interestingness of an input based on the coverage observed by the StdMapObserver. 
CrashFeedback is used to determine if an input is a solution (i.e., triggers a crash).", "golden_doc_uuids": ["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c"], "golden_chunk_uuids": [["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", 0]], "golden_documents": [{"uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the 
addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 0, "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n", "meta": {"hash_id": "aca7f4ac2377973a14467962bd97699e120ef964b371c2912cdecb28b54379ff"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 1, "content": "/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n", "meta": {"hash_id": "e4f88cdc05e704a83ed94baadbfd4561a507f822c66d32443f1f49af3c9efe50"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 2, "content": " 
#[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n", "meta": {"hash_id": "a55e11a9bab56f4eef35c02c7bbaf17861e06991f7f57438ea03d513d83f1030"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 3, "content": " // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n", "meta": {"hash_id": "b5cc47b917b7b6a4d4026359abdc24f7533b907f2bd8da92a5ef94e9ffdce7e4"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 4, "content": " // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n", "meta": {"hash_id": "63ec3bebbac7866631f9c53b1df7d980481d423bc13b348ddbaca707afe1d21c"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 5, "content": " // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n", "meta": {"hash_id": "65d6d5a4aef5ae950a53b1e76f8bf3c4def4aa8d0f8f724d6e763f7aa9af7506"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 6, "content": " // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": 
{"hash_id": "f3d5c873a1caaca47edcab9a0d23bbd5fc473f75766d701d14913494b6cfacda"}}]}], "golden_chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 0, "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n", "meta": {"hash_id": "aca7f4ac2377973a14467962bd97699e120ef964b371c2912cdecb28b54379ff"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "How is the initial corpus generated?", "answer": "The initial corpus is generated by evaluating a single input consisting of the byte 'a' using fuzzer.evaluate_input. This adds the input to the corpus and evaluates it using the harness.", "golden_doc_uuids": ["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c"], "golden_chunk_uuids": [["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", 5]], "golden_documents": [{"uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n #[cfg(windows)]\n unsafe {\n write_volatile(0 as 
*mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 0, "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, 
StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n", "meta": {"hash_id": "aca7f4ac2377973a14467962bd97699e120ef964b371c2912cdecb28b54379ff"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 1, "content": "/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n", "meta": {"hash_id": "e4f88cdc05e704a83ed94baadbfd4561a507f822c66d32443f1f49af3c9efe50"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 2, "content": " #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n", "meta": {"hash_id": "a55e11a9bab56f4eef35c02c7bbaf17861e06991f7f57438ea03d513d83f1030"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 3, "content": " // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n", "meta": {"hash_id": "b5cc47b917b7b6a4d4026359abdc24f7533b907f2bd8da92a5ef94e9ffdce7e4"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 4, "content": " // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler 
= QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n", "meta": {"hash_id": "63ec3bebbac7866631f9c53b1df7d980481d423bc13b348ddbaca707afe1d21c"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 5, "content": " // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n", "meta": {"hash_id": "65d6d5a4aef5ae950a53b1e76f8bf3c4def4aa8d0f8f724d6e763f7aa9af7506"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 6, "content": " // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": {"hash_id": "f3d5c873a1caaca47edcab9a0d23bbd5fc473f75766d701d14913494b6cfacda"}}]}], "golden_chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 5, "content": " // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n", "meta": {"hash_id": "65d6d5a4aef5ae950a53b1e76f8bf3c4def4aa8d0f8f724d6e763f7aa9af7506"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "What stages are used in the fuzzer?", "answer": "The fuzzer uses two stages: StringIdentificationStage and StdMutationalStage. StringIdentificationStage is used to identify string categories and subcategories in the input, which can then be used by the mutators. 
StdMutationalStage is a transforming stage that applies the scheduled mutators to generate new inputs.", "golden_doc_uuids": ["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c"], "golden_chunk_uuids": [["78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", 6]], "golden_documents": [{"uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the 
notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 0, "content": "#[cfg(windows)]\nuse std::ptr::write_volatile;\nuse std::{path::PathBuf, ptr::write};\n\n#[cfg(feature = \"tui\")]\nuse libafl::monitors::tui::{ui::TuiUI, TuiMonitor};\n#[cfg(not(feature = \"tui\"))]\nuse libafl::monitors::SimpleMonitor;\nuse libafl::{\n corpus::{InMemoryCorpus, OnDiskCorpus},\n events::SimpleEventManager,\n executors::{inprocess::InProcessExecutor, ExitKind},\n feedbacks::{CrashFeedback, MaxMapFeedback},\n fuzzer::{Fuzzer, StdFuzzer},\n inputs::{BytesInput, HasTargetBytes},\n mutators::{StdScheduledMutator, StringCategoryRandMutator, StringSubcategoryRandMutator},\n observers::StdMapObserver,\n schedulers::QueueScheduler,\n stages::{mutational::StdMutationalStage, StringIdentificationStage},\n state::StdState,\n Evaluator,\n};\nuse libafl_bolts::{current_nanos, rands::StdRand, tuples::tuple_list, AsSlice};\n\n", "meta": {"hash_id": "aca7f4ac2377973a14467962bd97699e120ef964b371c2912cdecb28b54379ff"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 1, "content": "/// Coverage map with explicit assignments due to the lack of instrumentation\nstatic mut SIGNALS: [u8; 64] = [0; 64];\nstatic mut SIGNALS_PTR: *mut u8 = unsafe { SIGNALS.as_mut_ptr() };\n\n/// Assign a signal to the signals map\nfn signals_set(idx: usize) {\n unsafe { write(SIGNALS_PTR.add(idx), 1) };\n}\n\n#[allow(clippy::similar_names, clippy::manual_assert)]\npub fn main() {\n // The closure that we want to fuzz\n let mut harness = |input: &BytesInput| {\n let target = input.target_bytes();\n let buf = target.as_slice();\n let goal = b\"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz\";\n let mut i = 0;\n for _ in buf.iter().zip(goal).take_while(|(b, c)| b == c) {\n signals_set(i);\n i += 1;\n }\n if i == goal.len() {\n #[cfg(unix)]\n panic!(\"Artificial bug triggered =)\");\n\n", "meta": {"hash_id": "e4f88cdc05e704a83ed94baadbfd4561a507f822c66d32443f1f49af3c9efe50"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 2, 
"content": " #[cfg(windows)]\n unsafe {\n write_volatile(0 as *mut u32, 0);\n }\n }\n ExitKind::Ok\n };\n\n // Create an observation channel using the signals map\n let observer = unsafe { StdMapObserver::from_mut_ptr(\"signals\", SIGNALS_PTR, SIGNALS.len()) };\n\n // Feedback to rate the interestingness of an input\n let mut feedback = MaxMapFeedback::new(&observer);\n\n // A feedback to choose if an input is a solution or not\n let mut objective = CrashFeedback::new();\n\n", "meta": {"hash_id": "a55e11a9bab56f4eef35c02c7bbaf17861e06991f7f57438ea03d513d83f1030"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 3, "content": " // create a State from scratch\n let mut state = StdState::new(\n // RNG\n StdRand::with_seed(current_nanos()),\n // Corpus that will be evolved, we keep it in memory for performance\n InMemoryCorpus::new(),\n // Corpus in which we store solutions (crashes in this example),\n // on disk so the user can get them after stopping the fuzzer\n OnDiskCorpus::new(PathBuf::from(\"./crashes\")).unwrap(),\n // States of the feedbacks.\n // The feedbacks can report the data that should persist in the State.\n &mut feedback,\n // Same for objective feedbacks\n &mut objective,\n )\n .unwrap();\n\n", "meta": {"hash_id": "b5cc47b917b7b6a4d4026359abdc24f7533b907f2bd8da92a5ef94e9ffdce7e4"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 4, "content": " // The Monitor trait define how the fuzzer stats are displayed to the user\n #[cfg(not(feature = \"tui\"))]\n let mon = SimpleMonitor::new(|s| println!(\"{s}\"));\n #[cfg(feature = \"tui\")]\n let ui = TuiUI::with_version(String::from(\"Baby Fuzzer\"), String::from(\"0.0.1\"), false);\n #[cfg(feature = \"tui\")]\n let mon = TuiMonitor::new(ui);\n\n // The event manager handle the various events generated during the fuzzing loop\n // such as the notification of the addition of a new item to the corpus\n let mut mgr = SimpleEventManager::new(mon);\n\n // A queue policy to get testcasess from the corpus\n let scheduler = QueueScheduler::new();\n\n // A fuzzer with feedbacks and a corpus scheduler\n let mut fuzzer = StdFuzzer::new(scheduler, feedback, objective);\n\n", "meta": {"hash_id": "63ec3bebbac7866631f9c53b1df7d980481d423bc13b348ddbaca707afe1d21c"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 5, "content": " // Create the executor for an in-process function with just one observer\n let mut executor = InProcessExecutor::new(\n &mut harness,\n tuple_list!(observer),\n &mut fuzzer,\n &mut state,\n &mut mgr,\n )\n .expect(\"Failed to create the Executor\");\n\n // Generate 8 initial inputs\n fuzzer\n .evaluate_input(\n &mut state,\n &mut executor,\n &mut mgr,\n BytesInput::new(vec![b'a']),\n )\n .unwrap();\n\n", "meta": {"hash_id": "65d6d5a4aef5ae950a53b1e76f8bf3c4def4aa8d0f8f724d6e763f7aa9af7506"}}, {"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 6, "content": " // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", 
"meta": {"hash_id": "f3d5c873a1caaca47edcab9a0d23bbd5fc473f75766d701d14913494b6cfacda"}}]}], "golden_chunks": [{"doc_uuid": "78cd6ead8e87695b47c2904e3027ae2b7251677caa5c5815b38c8756fe1a0b0c", "index": 6, "content": " // Setup a mutational stage with a basic bytes mutator\n let mutator = StdScheduledMutator::new(tuple_list!(\n StringCategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator,\n StringSubcategoryRandMutator\n ));\n let mut stages = tuple_list!(\n StringIdentificationStage::new(),\n StdMutationalStage::transforming(mutator)\n );\n\n fuzzer\n .fuzz_loop(&mut stages, &mut executor, &mut state, &mut mgr)\n .expect(\"Error in the fuzzing loop\");\n}\n", "meta": {"hash_id": "f3d5c873a1caaca47edcab9a0d23bbd5fc473f75766d701d14913494b6cfacda"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_unicode/src/main.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "What does the `OomObserver` struct do?", "answer": "The `OomObserver` struct is an implementation of the `Observer` trait in the `libafl` library. It detects if the target program would run out of memory or otherwise violate the permissible usage of `malloc` during fuzzing. This can be seen from the struct definition and its associated documentation comment:\n```rust\n/// Observer which detects if the target would run out of memory or otherwise violate the permissible usage of malloc\n#[derive(Debug, Serialize, Deserialize)]\npub struct OomObserver {\n oomed: bool,\n}\n```", "golden_doc_uuids": ["2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd"], "golden_chunk_uuids": [["2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", 4]], "golden_documents": [{"uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "content": "use core::{ffi::c_void, fmt::Debug};\nuse std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};\n\nuse libafl::{\n events::EventFirer,\n executors::ExitKind,\n feedbacks::Feedback,\n inputs::UsesInput,\n observers::{Observer, ObserversTuple},\n state::State,\n Error,\n};\nuse libafl_bolts::Named;\nuse libc::SIGABRT;\nuse serde::{Deserialize, Serialize};\n\nextern \"C\" {\n fn libafl_check_malloc_size(ptr: *const c_void) -> usize;\n}\n\nstatic RUNNING: AtomicBool = AtomicBool::new(false);\nstatic OOMED: AtomicBool = AtomicBool::new(false);\nstatic RSS_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n// 2GB, which is the default\nstatic MALLOC_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n\nstatic MALLOC_SIZE: AtomicUsize = AtomicUsize::new(0);\n\n/// malloc hook which will be invoked if address sanitizer is present. 
Used to detect if the target makes a malloc call\n/// that will exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid freshly allocated pointers backed by allocations of `size`.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_malloc_hook(ptr: *const c_void, size: usize) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = match unsafe { libafl_check_malloc_size(ptr) } {\n 0 => size, // either the malloc size function didn't work or it's really zero-sized\n real => real,\n };\n\n let total = MALLOC_SIZE.fetch_add(size, Ordering::Relaxed) + size;\n if (size > MALLOC_MAX.load(Ordering::Relaxed) || total > RSS_MAX.load(Ordering::Relaxed))\n && !OOMED.swap(true, Ordering::Relaxed)\n {\n unsafe {\n // we need to kill the process in a way that immediately triggers the crash handler\n libc::raise(SIGABRT);\n }\n }\n }\n}\n\n/// free hook which will be invoked if ASAN is present. Used to detect if the target makes a malloc call that will\n/// exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid allocated pointers, about to be freed.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_free_hook(ptr: *const c_void) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = unsafe { libafl_check_malloc_size(ptr) };\n MALLOC_SIZE\n .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |existing| {\n Some(existing.saturating_sub(size))\n })\n .expect(\"must complete successfully\");\n }\n}\n\nconst OOM_OBS_NAME: &str = \"libfuzzer-like-oom\";\n\n/// Observer which detects if the target would run out of memory or otherwise violate the permissible usage of malloc\n#[derive(Debug, Serialize, Deserialize)]\npub struct OomObserver {\n oomed: bool,\n}\n\nimpl OomObserver {\n /// Create a [`OomObserver`] with the provided `rss_max` (total heap size) and `malloc_max` (largest permissible malloc\n /// allocation size)\n pub fn new(rss_max: usize, malloc_max: usize) -> Self {\n RSS_MAX.store(rss_max, Ordering::Relaxed);\n MALLOC_MAX.store(malloc_max, Ordering::Relaxed);\n Self { oomed: false }\n }\n}\n\nimpl Named for OomObserver {\n // strictly one name to prevent two from being registered\n fn name(&self) -> &str {\n OOM_OBS_NAME\n }\n}\n\nimpl<S> Observer<S> for OomObserver\nwhere\n S: UsesInput,\n{\n fn pre_exec(&mut self, _state: &mut S, _input: &S::Input) -> Result<(), Error> {\n OOMED.store(false, Ordering::Relaxed);\n // must reset for platforms which do not offer malloc tracking\n MALLOC_SIZE.store(0, Ordering::Relaxed);\n RUNNING.store(true, Ordering::Relaxed);\n Ok(())\n }\n\n fn post_exec(\n &mut self,\n _state: &mut S,\n _input: &S::Input,\n _exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n RUNNING.store(false, Ordering::Relaxed);\n self.oomed = OOMED.load(Ordering::Relaxed);\n Ok(())\n }\n\n fn pre_exec_child(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.pre_exec(state, input)\n }\n\n fn post_exec_child(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.post_exec(state, input, exit_kind)\n }\n}\n\n/// Feedback for the similarly named [`OomObserver`] to detect if the target crashed due to an observed OOM\n#[derive(Debug, Serialize, Deserialize, Copy, Clone, Default)]\npub struct OomFeedback;\n\nimpl OomFeedback {\n /// Whether the target OOM'd in the last execution\n pub fn oomed() -> bool {\n OOMED.load(Ordering::Relaxed)\n }\n}\n\nimpl Named for OomFeedback {\n fn name(&self) -> &str {\n \"oom\"\n }\n}\n\nimpl<S> Feedback<S> for OomFeedback\nwhere\n S:
State,\n{\n fn is_interesting<EM, OT>(\n &mut self,\n _state: &mut S,\n _manager: &mut EM,\n _input: &S::Input,\n _observers: &OT,\n _exit_kind: &ExitKind,\n ) -> Result<bool, Error>\n where\n EM: EventFirer<State = S>,\n OT: ObserversTuple<S>,\n {\n Ok(Self::oomed())\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl_targets/src/libfuzzer/observers/oom.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 0, "content": "use core::{ffi::c_void, fmt::Debug};\nuse std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};\n\nuse libafl::{\n events::EventFirer,\n executors::ExitKind,\n feedbacks::Feedback,\n inputs::UsesInput,\n observers::{Observer, ObserversTuple},\n state::State,\n Error,\n};\nuse libafl_bolts::Named;\nuse libc::SIGABRT;\nuse serde::{Deserialize, Serialize};\n\nextern \"C\" {\n fn libafl_check_malloc_size(ptr: *const c_void) -> usize;\n}\n\nstatic RUNNING: AtomicBool = AtomicBool::new(false);\nstatic OOMED: AtomicBool = AtomicBool::new(false);\nstatic RSS_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n// 2GB, which is the default\nstatic MALLOC_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n\nstatic MALLOC_SIZE: AtomicUsize = AtomicUsize::new(0);\n\n", "meta": {"hash_id": "694023d2052e8716c4842f203e5eab1e9c7f6c0a09f9ed2e5a028b6bb5115477"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 1, "content": "/// malloc hook which will be invoked if address sanitizer is present. Used to detect if the target makes a malloc call\n/// that will exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid freshly allocated pointers backed by allocations of `size`.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_malloc_hook(ptr: *const c_void, size: usize) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = match unsafe { libafl_check_malloc_size(ptr) } {\n 0 => size, // either the malloc size function didn't work or it's really zero-sized\n real => real,\n };\n\n", "meta": {"hash_id": "0c03aab5f32b87fe481f95d3b6ca30eaad93caa4e474f1b4e94ed60c208d343c"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 2, "content": " let total = MALLOC_SIZE.fetch_add(size, Ordering::Relaxed) + size;\n if (size > MALLOC_MAX.load(Ordering::Relaxed) || total > RSS_MAX.load(Ordering::Relaxed))\n && !OOMED.swap(true, Ordering::Relaxed)\n {\n unsafe {\n // we need to kill the process in a way that immediately triggers the crash handler\n libc::raise(SIGABRT);\n }\n }\n }\n}\n\n", "meta": {"hash_id": "0e503de49a3c51a26c2a825f899af722723f6f56b21f1e62cbf28aa6a7516c97"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 3, "content": "/// free hook which will be invoked if ASAN is present.
Used to detect if the target makes a malloc call that will\n/// exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid allocated pointers, about to be freed.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_free_hook(ptr: *const c_void) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = unsafe { libafl_check_malloc_size(ptr) };\n MALLOC_SIZE\n .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |existing| {\n Some(existing.saturating_sub(size))\n })\n .expect(\"must complete successfully\");\n }\n}\n\n", "meta": {"hash_id": "43f01b7fb06f446651638df9232f7c49fc2aa97fd450bf08d42631baf6e780d7"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 4, "content": "const OOM_OBS_NAME: &str = \"libfuzzer-like-oom\";\n\n/// Observer which detects if the target would run out of memory or otherwise violate the permissible usage of malloc\n#[derive(Debug, Serialize, Deserialize)]\npub struct OomObserver {\n oomed: bool,\n}\n\nimpl OomObserver {\n /// Create a [`OomObserver`] with the provided `rss_max` (total heap size) and `malloc_max` (largest permissible malloc\n /// allocation size)\n pub fn new(rss_max: usize, malloc_max: usize) -> Self {\n RSS_MAX.store(rss_max, Ordering::Relaxed);\n MALLOC_MAX.store(malloc_max, Ordering::Relaxed);\n Self { oomed: false }\n }\n}\n\nimpl Named for OomObserver {\n // strictly one name to prevent two from being registered\n fn name(&self) -> &str {\n OOM_OBS_NAME\n }\n}\n\n", "meta": {"hash_id": "268279b4a6a43b58a018e3f392ac4a34ac9e65cde6c61e7ceb1f833630e14977"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 5, "content": "impl<S> Observer<S> for OomObserver\nwhere\n S: UsesInput,\n{\n fn pre_exec(&mut self, _state: &mut S, _input: &S::Input) -> Result<(), Error> {\n OOMED.store(false, Ordering::Relaxed);\n // must reset for platforms which do not offer malloc tracking\n MALLOC_SIZE.store(0, Ordering::Relaxed);\n RUNNING.store(true, Ordering::Relaxed);\n Ok(())\n }\n\n fn post_exec(\n &mut self,\n _state: &mut S,\n _input: &S::Input,\n _exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n RUNNING.store(false, Ordering::Relaxed);\n self.oomed = OOMED.load(Ordering::Relaxed);\n Ok(())\n }\n\n fn pre_exec_child(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.pre_exec(state, input)\n }\n\n fn post_exec_child(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.post_exec(state, input, exit_kind)\n }\n}\n\n", "meta": {"hash_id": "82f980e4c447cb3a615301b4822dfc9f7bb161305c083b6215d396098336276b"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 6, "content": "/// Feedback for the similarly named [`OomObserver`] to detect if the target crashed due to an observed OOM\n#[derive(Debug, Serialize, Deserialize, Copy, Clone, Default)]\npub struct OomFeedback;\n\nimpl OomFeedback {\n /// Whether the target OOM'd in the last execution\n pub fn oomed() -> bool {\n OOMED.load(Ordering::Relaxed)\n }\n}\n\nimpl Named for OomFeedback {\n fn name(&self) -> &str {\n \"oom\"\n }\n}\n\nimpl<S> Feedback<S> for OomFeedback\nwhere\n S: State,\n{\n fn is_interesting<EM, OT>(\n &mut self,\n _state: &mut S,\n _manager: &mut EM,\n _input: &S::Input,\n _observers: &OT,\n _exit_kind: &ExitKind,\n ) -> Result<bool, Error>\n where\n EM: EventFirer<State = S>,\n OT: ObserversTuple<S>,\n {\n Ok(Self::oomed())\n }\n}\n", "meta": {"hash_id":
"73b132330ae56dce5a1ec5b99cd554947fc6120b96ef3a710085b2179272882b"}}]}], "golden_chunks": [{"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 4, "content": "const OOM_OBS_NAME: &str = \"libfuzzer-like-oom\";\n\n/// Observer which detects if the target would run out of memory or otherwise violate the permissible usage of malloc\n#[derive(Debug, Serialize, Deserialize)]\npub struct OomObserver {\n oomed: bool,\n}\n\nimpl OomObserver {\n /// Create a [`OomObserver`] with the provided `rss_max` (total heap size) and `malloc_max` (largest permissible malloc\n /// allocation size)\n pub fn new(rss_max: usize, malloc_max: usize) -> Self {\n RSS_MAX.store(rss_max, Ordering::Relaxed);\n MALLOC_MAX.store(malloc_max, Ordering::Relaxed);\n Self { oomed: false }\n }\n}\n\nimpl Named for OomObserver {\n // strictly one name to prevent two from being registered\n fn name(&self) -> &str {\n OOM_OBS_NAME\n }\n}\n\n", "meta": {"hash_id": "268279b4a6a43b58a018e3f392ac4a34ac9e65cde6c61e7ceb1f833630e14977"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl_targets/src/libfuzzer/observers/oom.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "How do I create a new `OomObserver`?", "answer": "To create a new `OomObserver`, you can use the `new` associated function and provide the `rss_max` (total heap size) and `malloc_max` (largest permissible `malloc` allocation size) parameters. This function stores these values in the `RSS_MAX` and `MALLOC_MAX` atomic variables, respectively, and returns a new `OomObserver` instance:\n```rust\nimpl OomObserver {\n /// Create a [`OomObserver`] with the provided `rss_max` (total heap size) and `malloc_max` (largest permissible malloc\n /// allocation size)\n pub fn new(rss_max: usize, malloc_max: usize) -> Self {\n RSS_MAX.store(rss_max, Ordering::Relaxed);\n MALLOC_MAX.store(malloc_max, Ordering::Relaxed);\n Self { oomed: false }\n }\n}\n```", "golden_doc_uuids": ["2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd"], "golden_chunk_uuids": [["2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", 4]], "golden_documents": [{"uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "content": "use core::{ffi::c_void, fmt::Debug};\nuse std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};\n\nuse libafl::{\n events::EventFirer,\n executors::ExitKind,\n feedbacks::Feedback,\n inputs::UsesInput,\n observers::{Observer, ObserversTuple},\n state::State,\n Error,\n};\nuse libafl_bolts::Named;\nuse libc::SIGABRT;\nuse serde::{Deserialize, Serialize};\n\nextern \"C\" {\n fn libafl_check_malloc_size(ptr: *const c_void) -> usize;\n}\n\nstatic RUNNING: AtomicBool = AtomicBool::new(false);\nstatic OOMED: AtomicBool = AtomicBool::new(false);\nstatic RSS_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n// 2GB, which is the default\nstatic MALLOC_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n\nstatic MALLOC_SIZE: AtomicUsize = AtomicUsize::new(0);\n\n/// malloc hook which will be invoked if address sanitizer is present. 
Used to detect if the target makes a malloc call\n/// that will exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid freshly allocated pointers backed by allocations of `size`.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_malloc_hook(ptr: *const c_void, size: usize) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = match unsafe { libafl_check_malloc_size(ptr) } {\n 0 => size, // either the malloc size function didn't work or it's really zero-sized\n real => real,\n };\n\n let total = MALLOC_SIZE.fetch_add(size, Ordering::Relaxed) + size;\n if (size > MALLOC_MAX.load(Ordering::Relaxed) || total > RSS_MAX.load(Ordering::Relaxed))\n && !OOMED.swap(true, Ordering::Relaxed)\n {\n unsafe {\n // we need to kill the process in a way that immediately triggers the crash handler\n libc::raise(SIGABRT);\n }\n }\n }\n}\n\n/// free hook which will be invoked if ASAN is present. Used to detect if the target makes a malloc call that will\n/// exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid allocated pointers, about to be freed.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_free_hook(ptr: *const c_void) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = unsafe { libafl_check_malloc_size(ptr) };\n MALLOC_SIZE\n .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |existing| {\n Some(existing.saturating_sub(size))\n })\n .expect(\"must complete successfully\");\n }\n}\n\nconst OOM_OBS_NAME: &str = \"libfuzzer-like-oom\";\n\n/// Observer which detects if the target would run out of memory or otherwise violate the permissible usage of malloc\n#[derive(Debug, Serialize, Deserialize)]\npub struct OomObserver {\n oomed: bool,\n}\n\nimpl OomObserver {\n /// Create a [`OomObserver`] with the provided `rss_max` (total heap size) and `malloc_max` (largest permissible malloc\n /// allocation size)\n pub fn new(rss_max: usize, malloc_max: usize) -> Self {\n RSS_MAX.store(rss_max, Ordering::Relaxed);\n MALLOC_MAX.store(malloc_max, Ordering::Relaxed);\n Self { oomed: false }\n }\n}\n\nimpl Named for OomObserver {\n // strictly one name to prevent two from being registered\n fn name(&self) -> &str {\n OOM_OBS_NAME\n }\n}\n\nimpl<S> Observer<S> for OomObserver\nwhere\n S: UsesInput,\n{\n fn pre_exec(&mut self, _state: &mut S, _input: &S::Input) -> Result<(), Error> {\n OOMED.store(false, Ordering::Relaxed);\n // must reset for platforms which do not offer malloc tracking\n MALLOC_SIZE.store(0, Ordering::Relaxed);\n RUNNING.store(true, Ordering::Relaxed);\n Ok(())\n }\n\n fn post_exec(\n &mut self,\n _state: &mut S,\n _input: &S::Input,\n _exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n RUNNING.store(false, Ordering::Relaxed);\n self.oomed = OOMED.load(Ordering::Relaxed);\n Ok(())\n }\n\n fn pre_exec_child(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.pre_exec(state, input)\n }\n\n fn post_exec_child(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.post_exec(state, input, exit_kind)\n }\n}\n\n/// Feedback for the similarly named [`OomObserver`] to detect if the target crashed due to an observed OOM\n#[derive(Debug, Serialize, Deserialize, Copy, Clone, Default)]\npub struct OomFeedback;\n\nimpl OomFeedback {\n /// Whether the target OOM'd in the last execution\n pub fn oomed() -> bool {\n OOMED.load(Ordering::Relaxed)\n }\n}\n\nimpl Named for OomFeedback {\n fn name(&self) -> &str {\n \"oom\"\n }\n}\n\nimpl<S> Feedback<S> for OomFeedback\nwhere\n S:
State,\n{\n fn is_interesting<EM, OT>(\n &mut self,\n _state: &mut S,\n _manager: &mut EM,\n _input: &S::Input,\n _observers: &OT,\n _exit_kind: &ExitKind,\n ) -> Result<bool, Error>\n where\n EM: EventFirer<State = S>,\n OT: ObserversTuple<S>,\n {\n Ok(Self::oomed())\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl_targets/src/libfuzzer/observers/oom.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 0, "content": "use core::{ffi::c_void, fmt::Debug};\nuse std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};\n\nuse libafl::{\n events::EventFirer,\n executors::ExitKind,\n feedbacks::Feedback,\n inputs::UsesInput,\n observers::{Observer, ObserversTuple},\n state::State,\n Error,\n};\nuse libafl_bolts::Named;\nuse libc::SIGABRT;\nuse serde::{Deserialize, Serialize};\n\nextern \"C\" {\n fn libafl_check_malloc_size(ptr: *const c_void) -> usize;\n}\n\nstatic RUNNING: AtomicBool = AtomicBool::new(false);\nstatic OOMED: AtomicBool = AtomicBool::new(false);\nstatic RSS_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n// 2GB, which is the default\nstatic MALLOC_MAX: AtomicUsize = AtomicUsize::new(2 << 30);\n\nstatic MALLOC_SIZE: AtomicUsize = AtomicUsize::new(0);\n\n", "meta": {"hash_id": "694023d2052e8716c4842f203e5eab1e9c7f6c0a09f9ed2e5a028b6bb5115477"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 1, "content": "/// malloc hook which will be invoked if address sanitizer is present. Used to detect if the target makes a malloc call\n/// that will exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid freshly allocated pointers backed by allocations of `size`.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_malloc_hook(ptr: *const c_void, size: usize) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = match unsafe { libafl_check_malloc_size(ptr) } {\n 0 => size, // either the malloc size function didn't work or it's really zero-sized\n real => real,\n };\n\n", "meta": {"hash_id": "0c03aab5f32b87fe481f95d3b6ca30eaad93caa4e474f1b4e94ed60c208d343c"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 2, "content": " let total = MALLOC_SIZE.fetch_add(size, Ordering::Relaxed) + size;\n if (size > MALLOC_MAX.load(Ordering::Relaxed) || total > RSS_MAX.load(Ordering::Relaxed))\n && !OOMED.swap(true, Ordering::Relaxed)\n {\n unsafe {\n // we need to kill the process in a way that immediately triggers the crash handler\n libc::raise(SIGABRT);\n }\n }\n }\n}\n\n", "meta": {"hash_id": "0e503de49a3c51a26c2a825f899af722723f6f56b21f1e62cbf28aa6a7516c97"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 3, "content": "/// free hook which will be invoked if ASAN is present.
Used to detect if the target makes a malloc call that will\n/// exceed the permissible size\n///\n/// # Safety\n/// Is only safe to call with valid allocated pointers, about to be freed.\n#[no_mangle]\npub unsafe extern \"C\" fn __sanitizer_free_hook(ptr: *const c_void) {\n if RUNNING.load(Ordering::Relaxed) {\n let size = unsafe { libafl_check_malloc_size(ptr) };\n MALLOC_SIZE\n .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |existing| {\n Some(existing.saturating_sub(size))\n })\n .expect(\"must complete successfully\");\n }\n}\n\n", "meta": {"hash_id": "43f01b7fb06f446651638df9232f7c49fc2aa97fd450bf08d42631baf6e780d7"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 4, "content": "const OOM_OBS_NAME: &str = \"libfuzzer-like-oom\";\n\n/// Observer which detects if the target would run out of memory or otherwise violate the permissible usage of malloc\n#[derive(Debug, Serialize, Deserialize)]\npub struct OomObserver {\n oomed: bool,\n}\n\nimpl OomObserver {\n /// Create a [`OomObserver`] with the provided `rss_max` (total heap size) and `malloc_max` (largest permissible malloc\n /// allocation size)\n pub fn new(rss_max: usize, malloc_max: usize) -> Self {\n RSS_MAX.store(rss_max, Ordering::Relaxed);\n MALLOC_MAX.store(malloc_max, Ordering::Relaxed);\n Self { oomed: false }\n }\n}\n\nimpl Named for OomObserver {\n // strictly one name to prevent two from being registered\n fn name(&self) -> &str {\n OOM_OBS_NAME\n }\n}\n\n", "meta": {"hash_id": "268279b4a6a43b58a018e3f392ac4a34ac9e65cde6c61e7ceb1f833630e14977"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 5, "content": "impl<S> Observer<S> for OomObserver\nwhere\n S: UsesInput,\n{\n fn pre_exec(&mut self, _state: &mut S, _input: &S::Input) -> Result<(), Error> {\n OOMED.store(false, Ordering::Relaxed);\n // must reset for platforms which do not offer malloc tracking\n MALLOC_SIZE.store(0, Ordering::Relaxed);\n RUNNING.store(true, Ordering::Relaxed);\n Ok(())\n }\n\n fn post_exec(\n &mut self,\n _state: &mut S,\n _input: &S::Input,\n _exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n RUNNING.store(false, Ordering::Relaxed);\n self.oomed = OOMED.load(Ordering::Relaxed);\n Ok(())\n }\n\n fn pre_exec_child(&mut self, state: &mut S, input: &S::Input) -> Result<(), Error> {\n self.pre_exec(state, input)\n }\n\n fn post_exec_child(\n &mut self,\n state: &mut S,\n input: &S::Input,\n exit_kind: &ExitKind,\n ) -> Result<(), Error> {\n self.post_exec(state, input, exit_kind)\n }\n}\n\n", "meta": {"hash_id": "82f980e4c447cb3a615301b4822dfc9f7bb161305c083b6215d396098336276b"}}, {"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 6, "content": "/// Feedback for the similarly named [`OomObserver`] to detect if the target crashed due to an observed OOM\n#[derive(Debug, Serialize, Deserialize, Copy, Clone, Default)]\npub struct OomFeedback;\n\nimpl OomFeedback {\n /// Whether the target OOM'd in the last execution\n pub fn oomed() -> bool {\n OOMED.load(Ordering::Relaxed)\n }\n}\n\nimpl Named for OomFeedback {\n fn name(&self) -> &str {\n \"oom\"\n }\n}\n\nimpl<S> Feedback<S> for OomFeedback\nwhere\n S: State,\n{\n fn is_interesting<EM, OT>(\n &mut self,\n _state: &mut S,\n _manager: &mut EM,\n _input: &S::Input,\n _observers: &OT,\n _exit_kind: &ExitKind,\n ) -> Result<bool, Error>\n where\n EM: EventFirer<State = S>,\n OT: ObserversTuple<S>,\n {\n Ok(Self::oomed())\n }\n}\n", "meta": {"hash_id":
"73b132330ae56dce5a1ec5b99cd554947fc6120b96ef3a710085b2179272882b"}}]}], "golden_chunks": [{"doc_uuid": "2b9a8221386274740c40ffa7cdeee92c189fa3f9f59f17c347bfa99abbfa84cd", "index": 4, "content": "const OOM_OBS_NAME: &str = \"libfuzzer-like-oom\";\n\n/// Observer which detects if the target would run out of memory or otherwise violate the permissible usage of malloc\n#[derive(Debug, Serialize, Deserialize)]\npub struct OomObserver {\n oomed: bool,\n}\n\nimpl OomObserver {\n /// Create a [`OomObserver`] with the provided `rss_max` (total heap size) and `malloc_max` (largest permissible malloc\n /// allocation size)\n pub fn new(rss_max: usize, malloc_max: usize) -> Self {\n RSS_MAX.store(rss_max, Ordering::Relaxed);\n MALLOC_MAX.store(malloc_max, Ordering::Relaxed);\n Self { oomed: false }\n }\n}\n\nimpl Named for OomObserver {\n // strictly one name to prevent two from being registered\n fn name(&self) -> &str {\n OOM_OBS_NAME\n }\n}\n\n", "meta": {"hash_id": "268279b4a6a43b58a018e3f392ac4a34ac9e65cde6c61e7ceb1f833630e14977"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl_targets/src/libfuzzer/observers/oom.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "What does the function `both_require` do?", "answer": "The function `both_require` checks if the given byte array `bytes` of length `len` starts with the sequence 'a', 'b', 'c'. It returns `ACCEPT` if the sequence is found at the beginning of the array, and `REJECT` otherwise.", "golden_doc_uuids": ["531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f"], "golden_chunk_uuids": [["531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", 0]], "golden_documents": [{"uuid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", "content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_swap_differential/common.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", "index": 0, "content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}", "meta": {"hash_id": "2e4a1a14ecca4ae7cfe62e08736a960b4cb5a982db0ff228545350aedaf97a60"}}]}], "golden_chunks": [{"doc_uuid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", "index": 0, "content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}", "meta": {"hash_id": "2e4a1a14ecca4ae7cfe62e08736a960b4cb5a982db0ff228545350aedaf97a60"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_swap_differential/common.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "How does `both_require` check for the sequence 'a', 'b', 'c'?", "answer": "`both_require` uses a series of nested if statements to check for the sequence. 
First, it checks if `len >= 1 && bytes[0] == 'a'`, then if `len >= 2 && bytes[1] == 'b'`, and finally if `len >= 3 && bytes[2] == 'c'`. If all these conditions are met, it means the sequence 'a', 'b', 'c' is found at the beginning of the array, and the function returns `ACCEPT`.", "golden_doc_uuids": ["531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f"], "golden_chunk_uuids": [["531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", 0]], "golden_documents": [{"uuid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", "content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_swap_differential/common.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", "index": 0, "content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}", "meta": {"hash_id": "2e4a1a14ecca4ae7cfe62e08736a960b4cb5a982db0ff228545350aedaf97a60"}}]}], "golden_chunks": [{"doc_uuid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", "index": 0, "content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}", "meta": {"hash_id": "2e4a1a14ecca4ae7cfe62e08736a960b4cb5a982db0ff228545350aedaf97a60"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_swap_differential/common.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `len` parameter in `both_require`?", "answer": "The `len` parameter represents the length of the byte array `bytes`. It is used in the if conditions to ensure that the function does not access elements beyond the end of the array. 
For example, `if (len >= 1 && bytes[0] == 'a')` checks if the length is at least 1 before accessing `bytes[0]`.", "golden_doc_uuids": ["531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f"], "golden_chunk_uuids": [["531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", 0]], "golden_documents": [{"uuid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", "content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_swap_differential/common.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", "index": 0, "content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}", "meta": {"hash_id": "2e4a1a14ecca4ae7cfe62e08736a960b4cb5a982db0ff228545350aedaf97a60"}}]}], "golden_chunks": [{"doc_uuid": "531430fb53d5505059ecf3d7c8b4b6dd2a8ea035e0b37da202c385b706c7890f", "index": 0, "content": "#include \"common.h\"\n\nbool both_require(const uint8_t *bytes, size_t len) {\n if (len >= 1 && bytes[0] == 'a') {\n if (len >= 2 && bytes[1] == 'b') {\n if (len >= 3 && bytes[2] == 'c') { return ACCEPT; }\n }\n }\n return REJECT;\n}", "meta": {"hash_id": "2e4a1a14ecca4ae7cfe62e08736a960b4cb5a982db0ff228545350aedaf97a60"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/baby_fuzzer_swap_differential/common.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What does the vuln() function do?", "answer": "The vuln() function compares its input string buf against the string \"vuln\" using strcmp(). 
If they are equal, it calls abort() to terminate the program abnormally, as shown in the code:\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n", "golden_doc_uuids": ["0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95"], "golden_chunk_uuids": [["0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", 0]], "golden_documents": [{"uuid": "0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", "content": "#include <stdio.h>\n#include <string.h>\n#include <stdlib.h>\n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/forkserver_libafl_cc/src/program.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", "index": 0, "content": "#include <stdio.h>\n#include <string.h>\n#include <stdlib.h>\n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}", "meta": {"hash_id": "c1d1e82b3d6a243e6e01cb78ec7979313ace5dcce92afb9aa240c0f0fb4f3589"}}]}], "golden_chunks": [{"doc_uuid": "0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", "index": 0, "content": "#include <stdio.h>\n#include <string.h>\n#include <stdlib.h>\n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}", "meta": {"hash_id":
"c1d1e82b3d6a243e6e01cb78ec7979313ace5dcce92afb9aa240c0f0fb4f3589"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/forkserver_libafl_cc/src/program.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "How is input normally read in the main() function?", "answer": "The code for normal input reading is commented out, but it shows that input is read from either stdin or a file specified as a command-line argument using fgets() into a buffer buf of size 16, as shown here:\n\nFILE *file = stdin;\nif (argc > 1) { file = fopen(argv[1], \"rb\"); }\nchar buf[16];\nchar *p = fgets(buf, 16, file);\nbuf[15] = 0;\n", "golden_doc_uuids": ["0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95"], "golden_chunk_uuids": [["0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", 0]], "golden_documents": [{"uuid": "0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", "content": "#include <stdio.h>\n#include <string.h>\n#include <stdlib.h>\n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/forkserver_libafl_cc/src/program.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", "index": 0, "content": "#include <stdio.h>\n#include <string.h>\n#include <stdlib.h>\n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n
FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}", "meta": {"hash_id": "c1d1e82b3d6a243e6e01cb78ec7979313ace5dcce92afb9aa240c0f0fb4f3589"}}]}], "golden_chunks": [{"doc_uuid": "0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", "index": 0, "content": "#include <stdio.h>\n#include <string.h>\n#include <stdlib.h>\n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}", "meta": {"hash_id": "c1d1e82b3d6a243e6e01cb78ec7979313ace5dcce92afb9aa240c0f0fb4f3589"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/forkserver_libafl_cc/src/program.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What input condition causes the program to abort in the main() function?", "answer": "The main() function checks if the first three characters of the input buf are 'b', 'a', and 'd' respectively. If this condition is met, the program calls abort() to terminate abnormally, as shown in the code:\n\nif (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n}\n", "golden_doc_uuids": ["0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95"], "golden_chunk_uuids": [["0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", 0]], "golden_documents": [{"uuid": "0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", "content": "#include <stdio.h>\n#include <string.h>\n#include <stdlib.h>\n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}", "meta": {"doctype": "codebase", "relative_path": "/fuzzers/forkserver_libafl_cc/src/program.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid":
"0732e22d364e4359bf093902d674d9ec891bf9a2b4281da5c5bebc1d67879f95", "index": 0, "content": "#include <stdio.h>\n#include <string.h>\n#include <stdlib.h>\n\n// The following line is needed for shared memory testcase fuzzing\n__AFL_FUZZ_INIT();\n\nvoid vuln(char *buf) {\n if (strcmp(buf, \"vuln\") == 0) { abort(); }\n}\n\nint main(int argc, char **argv) {\n // Start the forkserver at this point (i.e., forks will happen here)\n __AFL_INIT();\n\n // The following five lines are for normal fuzzing.\n /*\n FILE *file = stdin;\n if (argc > 1) { file = fopen(argv[1], \"rb\"); }\n char buf[16];\n char *p = fgets(buf, 16, file);\n buf[15] = 0;\n */\n\n // The following line is also needed for shared memory testcase fuzzing\n unsigned char *buf = __AFL_FUZZ_TESTCASE_BUF; // must be after __AFL_INIT\n\n // printf(\"input: %s\\n\", buf);\n if (buf[0] == 'b') {\n if (buf[1] == 'a') {\n if (buf[2] == 'd') { abort(); }\n }\n }\n vuln((char *)buf);\n\n return 0;\n}", "meta": {"hash_id": "c1d1e82b3d6a243e6e01cb78ec7979313ace5dcce92afb9aa240c0f0fb4f3589"}}], "meta": {"doctype": "codebase", "relative_path": "/fuzzers/forkserver_libafl_cc/src/program.c", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `MergeScheduler` struct?", "answer": "The `MergeScheduler` struct is a custom scheduler implementation for the libafl fuzzing library. It maintains a mapping between coverage indices and corpus IDs, as well as a set of all corpus IDs. The purpose of this scheduler is not explicitly stated, but based on the `removable()` and `current()` methods, it seems to be used for identifying and managing removable and current corpus items during the fuzzing process.", "golden_doc_uuids": ["9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210"], "golden_chunk_uuids": [["9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", 0]], "golden_documents": [{"uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "content": "use std::{\n collections::{BTreeSet, HashMap},\n marker::PhantomData,\n};\n\nuse libafl::{\n corpus::{Corpus, CorpusId, Testcase},\n feedbacks::MapNoveltiesMetadata,\n inputs::UsesInput,\n schedulers::{RemovableScheduler, Scheduler},\n state::{HasCorpus, HasMetadata, State, UsesState},\n Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n mapping: HashMap<usize, CorpusId>,\n all: BTreeSet<CorpusId>,\n phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n S: State,\n{\n type State = S;\n}\n\nimpl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n S: State + HasCorpus,\n{\n fn on_remove(\n &mut self,\n _state: &mut Self::State,\n idx: CorpusId,\n _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n ) -> Result<(), Error> {\n self.all.remove(&idx);\n Ok(())\n }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n S: State + HasCorpus,\n{\n fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n self.all.insert(idx);\n let testcase = state.corpus().get(idx)?.borrow();\n let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n for cov_idx in &meta.list {\n self.mapping.insert(*cov_idx, idx);\n }\n Ok(())\n }\n\n fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n unimplemented!(\"Not suitable for actual scheduling.\");\n }\n}\n\nimpl<S> MergeScheduler<S> {\n pub fn new() -> Self {\n Self {\n mapping: HashMap::default(),\n all: BTreeSet::default(),\n phantom: PhantomData,\n }\n }\n\n pub fn removable(&self) -> BTreeSet<CorpusId> {\n self.all\n .difference(&self.mapping.values().copied().collect())\n .copied()\n .collect()\n }\n\n pub fn current(&self) ->
&BTreeSet<CorpusId> {\n &self.all\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl_libfuzzer/libafl_libfuzzer_runtime/src/schedulers.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 0, "content": "use std::{\n collections::{BTreeSet, HashMap},\n marker::PhantomData,\n};\n\nuse libafl::{\n corpus::{Corpus, CorpusId, Testcase},\n feedbacks::MapNoveltiesMetadata,\n inputs::UsesInput,\n schedulers::{RemovableScheduler, Scheduler},\n state::{HasCorpus, HasMetadata, State, UsesState},\n Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n mapping: HashMap<usize, CorpusId>,\n all: BTreeSet<CorpusId>,\n phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n S: State,\n{\n type State = S;\n}\n\n", "meta": {"hash_id": "fbe0da553b6d47839105bcf76b3af8ad3096409a64c4ab88aabf186cdb7ff26c"}}, {"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 1, "content": "impl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n S: State +
HasCorpus,\n{\n fn on_remove(\n &mut self,\n _state: &mut Self::State,\n idx: CorpusId,\n _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n ) -> Result<(), Error> {\n self.all.remove(&idx);\n Ok(())\n }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n S: State + HasCorpus,\n{\n fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n self.all.insert(idx);\n let testcase = state.corpus().get(idx)?.borrow();\n let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n for cov_idx in &meta.list {\n self.mapping.insert(*cov_idx, idx);\n }\n Ok(())\n }\n\n", "meta": {"hash_id": "5c67c92d61da47bb5efcd9b2548e190829766323c323d8016a067fd8411b964c"}}, {"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 2, "content": " fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n unimplemented!(\"Not suitable for actual scheduling.\");\n }\n}\n\nimpl<S> MergeScheduler<S> {\n pub fn new() -> Self {\n Self {\n mapping: HashMap::default(),\n all: BTreeSet::default(),\n phantom: PhantomData,\n }\n }\n\n pub fn removable(&self) -> BTreeSet<CorpusId> {\n self.all\n .difference(&self.mapping.values().copied().collect())\n .copied()\n .collect()\n }\n\n pub fn current(&self) -> &BTreeSet<CorpusId> {\n &self.all\n }\n}\n", "meta": {"hash_id": "80dd862994e6daf4fd9e3442695d6ac8eb1e21e9b9ff78d8fb15add5ebbb1f44"}}]}], "golden_chunks": [{"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 0, "content": "use std::{\n collections::{BTreeSet, HashMap},\n marker::PhantomData,\n};\n\nuse libafl::{\n corpus::{Corpus, CorpusId, Testcase},\n feedbacks::MapNoveltiesMetadata,\n inputs::UsesInput,\n schedulers::{RemovableScheduler, Scheduler},\n state::{HasCorpus, HasMetadata, State, UsesState},\n Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n mapping: HashMap<usize, CorpusId>,\n all: BTreeSet<CorpusId>,\n phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n S: State,\n{\n type State = S;\n}\n\n", "meta": {"hash_id": "fbe0da553b6d47839105bcf76b3af8ad3096409a64c4ab88aabf186cdb7ff26c"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl_libfuzzer/libafl_libfuzzer_runtime/src/schedulers.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How does the `on_add` method of the `MergeScheduler` work?", "answer": "The `on_add` method is called when a new testcase is added to the corpus. It performs the following steps:\n1.
Inserts the new corpus ID into the `all` set using `self.all.insert(idx)`.\n2. Retrieves the testcase associated with the corpus ID from the state using `state.corpus().get(idx)?.borrow()`.\n3. Retrieves the `MapNoveltiesMetadata` from the testcase using `testcase.metadata::<MapNoveltiesMetadata>()?`.\n4. Iterates over the coverage indices in the metadata's `list` and inserts them into the `mapping` HashMap, associating each coverage index with the corresponding corpus ID using `self.mapping.insert(*cov_idx, idx)`.\n5. Returns `Ok(())` if all operations are successful, or an error if any step fails.", "golden_doc_uuids": ["9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210"], "golden_chunk_uuids": [["9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", 1]], "golden_documents": [{"uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "content": "use std::{\n collections::{BTreeSet, HashMap},\n marker::PhantomData,\n};\n\nuse libafl::{\n corpus::{Corpus, CorpusId, Testcase},\n feedbacks::MapNoveltiesMetadata,\n inputs::UsesInput,\n schedulers::{RemovableScheduler, Scheduler},\n state::{HasCorpus, HasMetadata, State, UsesState},\n Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n mapping: HashMap<usize, CorpusId>,\n all: BTreeSet<CorpusId>,\n phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n S: State,\n{\n type State = S;\n}\n\nimpl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n S: State + HasCorpus,\n{\n fn on_remove(\n &mut self,\n _state: &mut Self::State,\n idx: CorpusId,\n _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n ) -> Result<(), Error> {\n self.all.remove(&idx);\n Ok(())\n }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n S: State + HasCorpus,\n{\n fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n self.all.insert(idx);\n let testcase = state.corpus().get(idx)?.borrow();\n let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n for cov_idx in &meta.list {\n self.mapping.insert(*cov_idx, idx);\n }\n Ok(())\n }\n\n fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n unimplemented!(\"Not suitable for actual scheduling.\");\n }\n}\n\nimpl<S> MergeScheduler<S> {\n pub fn new() -> Self {\n Self {\n mapping: HashMap::default(),\n all: BTreeSet::default(),\n phantom: PhantomData,\n }\n }\n\n pub fn removable(&self) -> BTreeSet<CorpusId> {\n self.all\n .difference(&self.mapping.values().copied().collect())\n .copied()\n .collect()\n }\n\n pub fn current(&self) -> &BTreeSet<CorpusId> {\n &self.all\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl_libfuzzer/libafl_libfuzzer_runtime/src/schedulers.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 0, "content": "use std::{\n collections::{BTreeSet, HashMap},\n marker::PhantomData,\n};\n\nuse libafl::{\n corpus::{Corpus, CorpusId, Testcase},\n feedbacks::MapNoveltiesMetadata,\n inputs::UsesInput,\n schedulers::{RemovableScheduler, Scheduler},\n state::{HasCorpus, HasMetadata, State, UsesState},\n Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n mapping: HashMap<usize, CorpusId>,\n all: BTreeSet<CorpusId>,\n phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n S: State,\n{\n type State = S;\n}\n\n", "meta": {"hash_id": "fbe0da553b6d47839105bcf76b3af8ad3096409a64c4ab88aabf186cdb7ff26c"}}, {"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 1, "content": "impl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n S: State +
HasCorpus,\n{\n fn on_remove(\n &mut self,\n _state: &mut Self::State,\n idx: CorpusId,\n _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n ) -> Result<(), Error> {\n self.all.remove(&idx);\n Ok(())\n }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n S: State + HasCorpus,\n{\n fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n self.all.insert(idx);\n let testcase = state.corpus().get(idx)?.borrow();\n let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n for cov_idx in &meta.list {\n self.mapping.insert(*cov_idx,
idx);\n }\n Ok(())\n }\n\n", "meta": {"hash_id": "5c67c92d61da47bb5efcd9b2548e190829766323c323d8016a067fd8411b964c"}}, {"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 2, "content": " fn next(&mut self, _state: &mut Self::State) -> Result {\n unimplemented!(\"Not suitable for actual scheduling.\");\n }\n}\n\nimpl MergeScheduler {\n pub fn new() -> Self {\n Self {\n mapping: HashMap::default(),\n all: BTreeSet::default(),\n phantom: PhantomData,\n }\n }\n\n pub fn removable(&self) -> BTreeSet {\n self.all\n .difference(&self.mapping.values().copied().collect())\n .copied()\n .collect()\n }\n\n pub fn current(&self) -> &BTreeSet {\n &self.all\n }\n}\n", "meta": {"hash_id": "80dd862994e6daf4fd9e3442695d6ac8eb1e21e9b9ff78d8fb15add5ebbb1f44"}}]}], "golden_chunks": [{"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 2, "content": " fn next(&mut self, _state: &mut Self::State) -> Result {\n unimplemented!(\"Not suitable for actual scheduling.\");\n }\n}\n\nimpl MergeScheduler {\n pub fn new() -> Self {\n Self {\n mapping: HashMap::default(),\n all: BTreeSet::default(),\n phantom: PhantomData,\n }\n }\n\n pub fn removable(&self) -> BTreeSet {\n self.all\n .difference(&self.mapping.values().copied().collect())\n .copied()\n .collect()\n }\n\n pub fn current(&self) -> &BTreeSet {\n &self.all\n }\n}\n", "meta": {"hash_id": "80dd862994e6daf4fd9e3442695d6ac8eb1e21e9b9ff78d8fb15add5ebbb1f44"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl_libfuzzer/libafl_libfuzzer_runtime/src/schedulers.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How does the `on_remove` method of the `MergeScheduler` work?", "answer": "The `on_remove` method is called when a testcase is removed from the corpus. It takes the following parameters:\n- `_state`: a mutable reference to the fuzzer state (unused in this implementation).\n- `idx`: the corpus ID of the testcase being removed.\n- `_testcase`: an optional reference to the testcase being removed (unused in this implementation).\n\nThe method simply removes the provided corpus ID from the `all` set using `self.all.remove(&idx)`, indicating that the testcase is no longer part of the scheduler's managed corpus. 
It returns `Ok(())` to indicate a successful removal.", "golden_doc_uuids": ["9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210"], "golden_chunk_uuids": [["9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", 1]], "golden_documents": [{"uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "content": "use std::{\n    collections::{BTreeSet, HashMap},\n    marker::PhantomData,\n};\n\nuse libafl::{\n    corpus::{Corpus, CorpusId, Testcase},\n    feedbacks::MapNoveltiesMetadata,\n    inputs::UsesInput,\n    schedulers::{RemovableScheduler, Scheduler},\n    state::{HasCorpus, HasMetadata, State, UsesState},\n    Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n    mapping: HashMap<usize, CorpusId>,\n    all: BTreeSet<CorpusId>,\n    phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n    S: State,\n{\n    type State = S;\n}\n\nimpl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_remove(\n        &mut self,\n        _state: &mut Self::State,\n        idx: CorpusId,\n        _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n    ) -> Result<(), Error> {\n        self.all.remove(&idx);\n        Ok(())\n    }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n        self.all.insert(idx);\n        let testcase = state.corpus().get(idx)?.borrow();\n        let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n        for cov_idx in &meta.list {\n            self.mapping.insert(*cov_idx, idx);\n        }\n        Ok(())\n    }\n\n    fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n        unimplemented!(\"Not suitable for actual scheduling.\");\n    }\n}\n\nimpl<S> MergeScheduler<S> {\n    pub fn new() -> Self {\n        Self {\n            mapping: HashMap::default(),\n            all: BTreeSet::default(),\n            phantom: PhantomData,\n        }\n    }\n\n    pub fn removable(&self) -> BTreeSet<CorpusId> {\n        self.all\n            .difference(&self.mapping.values().copied().collect())\n            .copied()\n            .collect()\n    }\n\n    pub fn current(&self) -> &BTreeSet<CorpusId> {\n        &self.all\n    }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl_libfuzzer/libafl_libfuzzer_runtime/src/schedulers.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 0, "content": "use std::{\n    collections::{BTreeSet, HashMap},\n    marker::PhantomData,\n};\n\nuse libafl::{\n    corpus::{Corpus, CorpusId, Testcase},\n    feedbacks::MapNoveltiesMetadata,\n    inputs::UsesInput,\n    schedulers::{RemovableScheduler, Scheduler},\n    state::{HasCorpus, HasMetadata, State, UsesState},\n    Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n    mapping: HashMap<usize, CorpusId>,\n    all: BTreeSet<CorpusId>,\n    phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n    S: State,\n{\n    type State = S;\n}\n\n", "meta": {"hash_id": "fbe0da553b6d47839105bcf76b3af8ad3096409a64c4ab88aabf186cdb7ff26c"}}, {"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 1, "content": "impl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_remove(\n        &mut self,\n        _state: &mut Self::State,\n        idx: CorpusId,\n        _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n    ) -> Result<(), Error> {\n        self.all.remove(&idx);\n        Ok(())\n    }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n        self.all.insert(idx);\n        let testcase = state.corpus().get(idx)?.borrow();\n        let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n        for cov_idx in &meta.list {\n            self.mapping.insert(*cov_idx, idx);\n        }\n        
Ok(())\n    }\n\n", "meta": {"hash_id": "5c67c92d61da47bb5efcd9b2548e190829766323c323d8016a067fd8411b964c"}}, {"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 2, "content": "    fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n        unimplemented!(\"Not suitable for actual scheduling.\");\n    }\n}\n\nimpl<S> MergeScheduler<S> {\n    pub fn new() -> Self {\n        Self {\n            mapping: HashMap::default(),\n            all: BTreeSet::default(),\n            phantom: PhantomData,\n        }\n    }\n\n    pub fn removable(&self) -> BTreeSet<CorpusId> {\n        self.all\n            .difference(&self.mapping.values().copied().collect())\n            .copied()\n            .collect()\n    }\n\n    pub fn current(&self) -> &BTreeSet<CorpusId> {\n        &self.all\n    }\n}\n", "meta": {"hash_id": "80dd862994e6daf4fd9e3442695d6ac8eb1e21e9b9ff78d8fb15add5ebbb1f44"}}]}], "golden_chunks": [{"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 1, "content": "impl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_remove(\n        &mut self,\n        _state: &mut Self::State,\n        idx: CorpusId,\n        _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n    ) -> Result<(), Error> {\n        self.all.remove(&idx);\n        Ok(())\n    }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n        self.all.insert(idx);\n        let testcase = state.corpus().get(idx)?.borrow();\n        let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n        for cov_idx in &meta.list {\n            self.mapping.insert(*cov_idx, idx);\n        }\n        Ok(())\n    }\n\n", "meta": {"hash_id": "5c67c92d61da47bb5efcd9b2548e190829766323c323d8016a067fd8411b964c"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl_libfuzzer/libafl_libfuzzer_runtime/src/schedulers.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `current()` method in the `MergeScheduler`?", "answer": "The `current()` method returns a shared reference to the `all` set, which contains all the corpus IDs currently managed by the scheduler. 
This method allows other parts of the code to access the current set of corpus IDs without modifying it.", "golden_doc_uuids": ["9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210"], "golden_chunk_uuids": [["9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", 2]], "golden_documents": [{"uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "content": "use std::{\n    collections::{BTreeSet, HashMap},\n    marker::PhantomData,\n};\n\nuse libafl::{\n    corpus::{Corpus, CorpusId, Testcase},\n    feedbacks::MapNoveltiesMetadata,\n    inputs::UsesInput,\n    schedulers::{RemovableScheduler, Scheduler},\n    state::{HasCorpus, HasMetadata, State, UsesState},\n    Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n    mapping: HashMap<usize, CorpusId>,\n    all: BTreeSet<CorpusId>,\n    phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n    S: State,\n{\n    type State = S;\n}\n\nimpl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_remove(\n        &mut self,\n        _state: &mut Self::State,\n        idx: CorpusId,\n        _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n    ) -> Result<(), Error> {\n        self.all.remove(&idx);\n        Ok(())\n    }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n        self.all.insert(idx);\n        let testcase = state.corpus().get(idx)?.borrow();\n        let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n        for cov_idx in &meta.list {\n            self.mapping.insert(*cov_idx, idx);\n        }\n        Ok(())\n    }\n\n    fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n        unimplemented!(\"Not suitable for actual scheduling.\");\n    }\n}\n\nimpl<S> MergeScheduler<S> {\n    pub fn new() -> Self {\n        Self {\n            mapping: HashMap::default(),\n            all: BTreeSet::default(),\n            phantom: PhantomData,\n        }\n    }\n\n    pub fn removable(&self) -> BTreeSet<CorpusId> {\n        self.all\n            .difference(&self.mapping.values().copied().collect())\n            .copied()\n            .collect()\n    }\n\n    pub fn current(&self) -> &BTreeSet<CorpusId> {\n        &self.all\n    }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl_libfuzzer/libafl_libfuzzer_runtime/src/schedulers.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 0, "content": "use std::{\n    collections::{BTreeSet, HashMap},\n    marker::PhantomData,\n};\n\nuse libafl::{\n    corpus::{Corpus, CorpusId, Testcase},\n    feedbacks::MapNoveltiesMetadata,\n    inputs::UsesInput,\n    schedulers::{RemovableScheduler, Scheduler},\n    state::{HasCorpus, HasMetadata, State, UsesState},\n    Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n    mapping: HashMap<usize, CorpusId>,\n    all: BTreeSet<CorpusId>,\n    phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n    S: State,\n{\n    type State = S;\n}\n\n", "meta": {"hash_id": "fbe0da553b6d47839105bcf76b3af8ad3096409a64c4ab88aabf186cdb7ff26c"}}, {"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 1, "content": "impl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_remove(\n        &mut self,\n        _state: &mut Self::State,\n        idx: CorpusId,\n        _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n    ) -> Result<(), Error> {\n        self.all.remove(&idx);\n        Ok(())\n    }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n        self.all.insert(idx);\n        let testcase = state.corpus().get(idx)?.borrow();\n        let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n        for cov_idx in &meta.list 
{\n            self.mapping.insert(*cov_idx, idx);\n        }\n        Ok(())\n    }\n\n", "meta": {"hash_id": "5c67c92d61da47bb5efcd9b2548e190829766323c323d8016a067fd8411b964c"}}, {"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 2, "content": "    fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n        unimplemented!(\"Not suitable for actual scheduling.\");\n    }\n}\n\nimpl<S> MergeScheduler<S> {\n    pub fn new() -> Self {\n        Self {\n            mapping: HashMap::default(),\n            all: BTreeSet::default(),\n            phantom: PhantomData,\n        }\n    }\n\n    pub fn removable(&self) -> BTreeSet<CorpusId> {\n        self.all\n            .difference(&self.mapping.values().copied().collect())\n            .copied()\n            .collect()\n    }\n\n    pub fn current(&self) -> &BTreeSet<CorpusId> {\n        &self.all\n    }\n}\n", "meta": {"hash_id": "80dd862994e6daf4fd9e3442695d6ac8eb1e21e9b9ff78d8fb15add5ebbb1f44"}}]}], "golden_chunks": [{"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 2, "content": "    fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n        unimplemented!(\"Not suitable for actual scheduling.\");\n    }\n}\n\nimpl<S> MergeScheduler<S> {\n    pub fn new() -> Self {\n        Self {\n            mapping: HashMap::default(),\n            all: BTreeSet::default(),\n            phantom: PhantomData,\n        }\n    }\n\n    pub fn removable(&self) -> BTreeSet<CorpusId> {\n        self.all\n            .difference(&self.mapping.values().copied().collect())\n            .copied()\n            .collect()\n    }\n\n    pub fn current(&self) -> &BTreeSet<CorpusId> {\n        &self.all\n    }\n}\n", "meta": {"hash_id": "80dd862994e6daf4fd9e3442695d6ac8eb1e21e9b9ff78d8fb15add5ebbb1f44"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl_libfuzzer/libafl_libfuzzer_runtime/src/schedulers.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "Why is the `next()` method of the `MergeScheduler` unimplemented?", "answer": "The `next()` method is declared as part of the `Scheduler` trait implementation for `MergeScheduler`, but it is intentionally left unimplemented. The `unimplemented!(\"Not suitable for actual scheduling.\");` macro call panics if it is ever invoked, which signals that this scheduler is not designed to be used for actual testcase scheduling during the fuzzing process. 
The `MergeScheduler` seems to be focused on managing the corpus and identifying removable testcases rather than selecting the next testcase to be executed.", "golden_doc_uuids": ["9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210"], "golden_chunk_uuids": [["9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", 0]], "golden_documents": [{"uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "content": "use std::{\n    collections::{BTreeSet, HashMap},\n    marker::PhantomData,\n};\n\nuse libafl::{\n    corpus::{Corpus, CorpusId, Testcase},\n    feedbacks::MapNoveltiesMetadata,\n    inputs::UsesInput,\n    schedulers::{RemovableScheduler, Scheduler},\n    state::{HasCorpus, HasMetadata, State, UsesState},\n    Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n    mapping: HashMap<usize, CorpusId>,\n    all: BTreeSet<CorpusId>,\n    phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n    S: State,\n{\n    type State = S;\n}\n\nimpl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_remove(\n        &mut self,\n        _state: &mut Self::State,\n        idx: CorpusId,\n        _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n    ) -> Result<(), Error> {\n        self.all.remove(&idx);\n        Ok(())\n    }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n        self.all.insert(idx);\n        let testcase = state.corpus().get(idx)?.borrow();\n        let meta = 
testcase.metadata::<MapNoveltiesMetadata>()?;\n        for cov_idx in &meta.list {\n            self.mapping.insert(*cov_idx, idx);\n        }\n        Ok(())\n    }\n\n    fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n        unimplemented!(\"Not suitable for actual scheduling.\");\n    }\n}\n\nimpl<S> MergeScheduler<S> {\n    pub fn new() -> Self {\n        Self {\n            mapping: HashMap::default(),\n            all: BTreeSet::default(),\n            phantom: PhantomData,\n        }\n    }\n\n    pub fn removable(&self) -> BTreeSet<CorpusId> {\n        self.all\n            .difference(&self.mapping.values().copied().collect())\n            .copied()\n            .collect()\n    }\n\n    pub fn current(&self) -> &BTreeSet<CorpusId> {\n        &self.all\n    }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl_libfuzzer/libafl_libfuzzer_runtime/src/schedulers.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 0, "content": "use std::{\n    collections::{BTreeSet, HashMap},\n    marker::PhantomData,\n};\n\nuse libafl::{\n    corpus::{Corpus, CorpusId, Testcase},\n    feedbacks::MapNoveltiesMetadata,\n    inputs::UsesInput,\n    schedulers::{RemovableScheduler, Scheduler},\n    state::{HasCorpus, HasMetadata, State, UsesState},\n    Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n    mapping: HashMap<usize, CorpusId>,\n    all: BTreeSet<CorpusId>,\n    phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n    S: State,\n{\n    type State = S;\n}\n\n", "meta": {"hash_id": "fbe0da553b6d47839105bcf76b3af8ad3096409a64c4ab88aabf186cdb7ff26c"}}, {"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 1, "content": "impl<S> RemovableScheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_remove(\n        &mut self,\n        _state: &mut Self::State,\n        idx: CorpusId,\n        _testcase: &Option<Testcase<<Self::State as UsesInput>::Input>>,\n    ) -> Result<(), Error> {\n        self.all.remove(&idx);\n        Ok(())\n    }\n}\n\nimpl<S> Scheduler for MergeScheduler<S>\nwhere\n    S: State + HasCorpus,\n{\n    fn on_add(&mut self, state: &mut Self::State, idx: CorpusId) -> Result<(), Error> {\n        self.all.insert(idx);\n        let testcase = state.corpus().get(idx)?.borrow();\n        let meta = testcase.metadata::<MapNoveltiesMetadata>()?;\n        for cov_idx in &meta.list {\n            self.mapping.insert(*cov_idx, idx);\n        }\n        Ok(())\n    }\n\n", "meta": {"hash_id": "5c67c92d61da47bb5efcd9b2548e190829766323c323d8016a067fd8411b964c"}}, {"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 2, "content": "    fn next(&mut self, _state: &mut Self::State) -> Result<CorpusId, Error> {\n        unimplemented!(\"Not suitable for actual scheduling.\");\n    }\n}\n\nimpl<S> MergeScheduler<S> {\n    pub fn new() -> Self {\n        Self {\n            mapping: HashMap::default(),\n            all: BTreeSet::default(),\n            phantom: PhantomData,\n        }\n    }\n\n    pub fn removable(&self) -> BTreeSet<CorpusId> {\n        self.all\n            .difference(&self.mapping.values().copied().collect())\n            .copied()\n            .collect()\n    }\n\n    pub fn current(&self) -> &BTreeSet<CorpusId> {\n        &self.all\n    }\n}\n", "meta": {"hash_id": "80dd862994e6daf4fd9e3442695d6ac8eb1e21e9b9ff78d8fb15add5ebbb1f44"}}]}], "golden_chunks": [{"doc_uuid": "9de08c4cbd3e0aca53020c82fcb434e39fc07d85fc092617f89c993d1fd28210", "index": 0, "content": "use std::{\n    collections::{BTreeSet, HashMap},\n    marker::PhantomData,\n};\n\nuse libafl::{\n    corpus::{Corpus, CorpusId, Testcase},\n    feedbacks::MapNoveltiesMetadata,\n    inputs::UsesInput,\n    schedulers::{RemovableScheduler, Scheduler},\n    state::{HasCorpus, HasMetadata, State, UsesState},\n    Error,\n};\n\n#[derive(Clone, Debug)]\npub struct MergeScheduler<S> {\n    mapping: HashMap<usize, CorpusId>,\n    all: BTreeSet<CorpusId>,\n    phantom: PhantomData<S>,\n}\n\nimpl<S> UsesState for MergeScheduler<S>\nwhere\n    S: State,\n{\n    type State = S;\n}\n\n", "meta": {"hash_id": "fbe0da553b6d47839105bcf76b3af8ad3096409a64c4ab88aabf186cdb7ff26c"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl_libfuzzer/libafl_libfuzzer_runtime/src/schedulers.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How are the `Fp` and `Lr` registers defined as aliases in the `Regs` enum?", "answer": "The `Fp` and `Lr` registers are defined as aliases using an `impl` block for the `Regs` enum:\n```rust\nimpl Regs {\n    pub const Fp: Regs = Regs::X29;\n    pub const Lr: Regs = Regs::X30;\n}\n```\nThis allows using `Regs::Fp` and `Regs::Lr` to refer to the `X29` and `X30` registers, respectively.", "golden_doc_uuids": ["a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f"], "golden_chunk_uuids": [["a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", 2]], "golden_documents": [{"uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "content": "use std::sync::OnceLock;\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::aarch64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n    X0 = 0,\n    X1 = 1,\n    X2 = 2,\n    X3 = 3,\n    X4 = 4,\n    X5 = 5,\n    X6 = 6,\n    X7 = 7,\n    X8 = 8,\n    X9 = 9,\n    X10 = 10,\n    X11 = 11,\n    X12 = 12,\n    X13 = 13,\n    X14 = 14,\n    X15 = 15,\n    X16 = 16,\n    X17 = 17,\n    X18 = 18,\n    X19 = 19,\n    X20 = 20,\n    X21 = 21,\n    X22 = 22,\n    X23 = 23,\n    X24 = 24,\n    X25 = 25,\n    X26 = 26,\n    X27 = 27,\n    X28 = 28,\n    X29 = 29,\n    X30 = 30,\n    Sp = 31,\n    Pc = 32,\n    Pstate = 33,\n}\n\nstatic BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\npub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n    BACKDOOR_ARCH_REGS.get_or_init(|| {\n        enum_map! 
{\n            BackdoorArgs::Ret => Regs::X0,\n            BackdoorArgs::Cmd => Regs::X0,\n            BackdoorArgs::Arg1 => Regs::X1,\n            BackdoorArgs::Arg2 => Regs::X2,\n            BackdoorArgs::Arg3 => Regs::X3,\n            BackdoorArgs::Arg4 => Regs::X4,\n            BackdoorArgs::Arg5 => Regs::X5,\n            BackdoorArgs::Arg6 => Regs::X6,\n        }\n    })\n}\n\n/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n    pub const Fp: Regs = Regs::X29;\n    pub const Lr: Regs = Regs::X30;\n}\n\n/// Return an ARM64 ArchCapstoneBuilder\npub fn capstone() -> capstone::arch::arm64::ArchCapstoneBuilder {\n    capstone::Capstone::new()\n        .arm64()\n        .mode(capstone::arch::arm64::ArchMode::Arm)\n}\n\npub type GuestReg = u64;\n\nimpl crate::ArchExtras for crate::CPU {\n    fn read_return_address<T>(&self) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        self.read_reg(Regs::Lr)\n    }\n\n    fn write_return_address<T>(&self, val: T) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        self.write_reg(Regs::Lr, val)\n    }\n\n    fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let reg_id = match idx {\n            0 => Regs::X0,\n            1 => Regs::X1,\n            2 => Regs::X2,\n            3 => Regs::X3,\n            4 => Regs::X4,\n            5 => Regs::X5,\n            r => return Err(format!(\"Unsupported argument: {r:}\")),\n        };\n\n        self.read_reg(reg_id)\n    }\n\n    fn write_function_argument<T>(\n        &self,\n        conv: CallingConvention,\n        idx: i32,\n        val: T,\n    ) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let val: GuestReg = val.into();\n        match idx {\n            0 => self.write_reg(Regs::X0, val),\n            1 => self.write_reg(Regs::X1, val),\n            _ => Err(format!(\"Unsupported argument: {idx:}\")),\n        }\n    }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl_qemu/src/aarch64.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 0, "content": "use std::sync::OnceLock;\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::aarch64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n    X0 = 0,\n    X1 = 1,\n    X2 = 2,\n    X3 = 3,\n    X4 = 4,\n    X5 = 5,\n    X6 = 6,\n    X7 = 7,\n    X8 = 8,\n    X9 = 9,\n    X10 = 10,\n    X11 = 11,\n    X12 = 12,\n    X13 = 13,\n    X14 = 14,\n    X15 = 15,\n    X16 = 16,\n    X17 = 17,\n    X18 = 18,\n    X19 = 19,\n    X20 = 20,\n    X21 = 21,\n    X22 = 22,\n    X23 = 23,\n    X24 = 24,\n    X25 = 25,\n    X26 = 26,\n    X27 = 27,\n    X28 = 28,\n    X29 = 29,\n    X30 = 30,\n    Sp = 31,\n    Pc = 32,\n    Pstate = 33,\n}\n\n", "meta": {"hash_id": "2a99dc5ec2948319ae2b833cd5981a885ede825d48f8760d2af6fc7c7d593af3"}}, {"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 1, "content": "static BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\npub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n    BACKDOOR_ARCH_REGS.get_or_init(|| {\n        enum_map! 
{\n            BackdoorArgs::Ret => Regs::X0,\n            BackdoorArgs::Cmd => Regs::X0,\n            BackdoorArgs::Arg1 => Regs::X1,\n            BackdoorArgs::Arg2 => Regs::X2,\n            BackdoorArgs::Arg3 => Regs::X3,\n            BackdoorArgs::Arg4 => Regs::X4,\n            BackdoorArgs::Arg5 => Regs::X5,\n            BackdoorArgs::Arg6 => Regs::X6,\n        }\n    })\n}\n\n", "meta": {"hash_id": "70615d2977ac97ee608bb4dc41d296a2acee96573b19b80d4ffc3f6d33bea137"}}, {"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 2, "content": "/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n    pub const Fp: Regs = Regs::X29;\n    pub const Lr: Regs = Regs::X30;\n}\n\n/// Return an ARM64 ArchCapstoneBuilder\npub fn capstone() -> capstone::arch::arm64::ArchCapstoneBuilder {\n    capstone::Capstone::new()\n        .arm64()\n        .mode(capstone::arch::arm64::ArchMode::Arm)\n}\n\npub type GuestReg = u64;\n\nimpl crate::ArchExtras for crate::CPU {\n    fn read_return_address<T>(&self) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        self.read_reg(Regs::Lr)\n    }\n\n    fn write_return_address<T>(&self, val: T) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        self.write_reg(Regs::Lr, val)\n    }\n\n", "meta": {"hash_id": "3e41f90e0745788f6227903e3734813d408a518e543d1105237c4edaad0d568a"}}, {"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 3, "content": "    fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let reg_id = match idx {\n            0 => Regs::X0,\n            1 => Regs::X1,\n            2 => Regs::X2,\n            3 => Regs::X3,\n            4 => Regs::X4,\n            5 => Regs::X5,\n            r => return Err(format!(\"Unsupported argument: {r:}\")),\n        };\n\n        self.read_reg(reg_id)\n    }\n\n", "meta": {"hash_id": "156aa4935b8998341427f5aba1997c02f663ebeb42219ff6ed8b7eec1931b32f"}}, {"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 4, "content": "    fn write_function_argument<T>(\n        &self,\n        conv: CallingConvention,\n        idx: i32,\n        val: T,\n    ) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let val: GuestReg = val.into();\n        match idx {\n            0 => self.write_reg(Regs::X0, val),\n            1 => self.write_reg(Regs::X1, val),\n            _ => Err(format!(\"Unsupported argument: {idx:}\")),\n        }\n    }\n}\n", "meta": {"hash_id": "27b5cb2a32111638cfa758256b3e4ab4535ece4a26d6088f57c2e1852fb7d43a"}}]}], "golden_chunks": [{"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 2, "content": "/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n    pub const Fp: Regs = Regs::X29;\n    pub const Lr: Regs = Regs::X30;\n}\n\n/// Return an ARM64 ArchCapstoneBuilder\npub fn capstone() -> capstone::arch::arm64::ArchCapstoneBuilder {\n    capstone::Capstone::new()\n        .arm64()\n        .mode(capstone::arch::arm64::ArchMode::Arm)\n}\n\npub type GuestReg = u64;\n\nimpl crate::ArchExtras for crate::CPU {\n    fn read_return_address<T>(&self) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        self.read_reg(Regs::Lr)\n    }\n\n    fn write_return_address<T>(&self, val: T) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        self.write_reg(Regs::Lr, val)\n    }\n\n", "meta": {"hash_id": "3e41f90e0745788f6227903e3734813d408a518e543d1105237c4edaad0d568a"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl_qemu/src/aarch64.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the 
`get_backdoor_arch_regs` function?", "answer": "The `get_backdoor_arch_regs` function returns a reference to a static `EnumMap` that maps `BackdoorArgs` to `Regs`. It is defined as:\n```rust\npub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n    BACKDOOR_ARCH_REGS.get_or_init(|| {\n        enum_map! {\n            BackdoorArgs::Ret => Regs::X0,\n            BackdoorArgs::Cmd => Regs::X0,\n            BackdoorArgs::Arg1 => Regs::X1,\n            BackdoorArgs::Arg2 => Regs::X2,\n            BackdoorArgs::Arg3 => Regs::X3,\n            BackdoorArgs::Arg4 => Regs::X4,\n            BackdoorArgs::Arg5 => Regs::X5,\n            BackdoorArgs::Arg6 => Regs::X6,\n        }\n    })\n}\n```\nIt uses the `OnceLock` type to ensure that the `EnumMap` is initialized only once, and the `enum_map!` macro to define the mapping between `BackdoorArgs` and `Regs`.", "golden_doc_uuids": ["a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f"], "golden_chunk_uuids": [["a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", 1]], "golden_documents": [{"uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "content": "use std::sync::OnceLock;\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::aarch64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n    X0 = 0,\n    X1 = 1,\n    X2 = 2,\n    X3 = 3,\n    X4 = 4,\n    X5 = 5,\n    X6 = 6,\n    X7 = 7,\n    X8 = 8,\n    X9 = 9,\n    X10 = 10,\n    X11 = 11,\n    X12 = 12,\n    X13 = 13,\n    X14 = 14,\n    X15 = 15,\n    X16 = 16,\n    X17 = 17,\n    X18 = 18,\n    X19 = 19,\n    X20 = 20,\n    X21 = 21,\n    X22 = 22,\n    X23 = 23,\n    X24 = 24,\n    X25 = 25,\n    X26 = 26,\n    X27 = 27,\n    X28 = 28,\n    X29 = 29,\n    X30 = 30,\n    Sp = 31,\n    Pc = 32,\n    Pstate = 33,\n}\n\nstatic BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\npub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n    BACKDOOR_ARCH_REGS.get_or_init(|| {\n        enum_map! 
{\n            BackdoorArgs::Ret => Regs::X0,\n            BackdoorArgs::Cmd => Regs::X0,\n            BackdoorArgs::Arg1 => Regs::X1,\n            BackdoorArgs::Arg2 => Regs::X2,\n            BackdoorArgs::Arg3 => Regs::X3,\n            BackdoorArgs::Arg4 => Regs::X4,\n            BackdoorArgs::Arg5 => Regs::X5,\n            BackdoorArgs::Arg6 => Regs::X6,\n        }\n    })\n}\n\n/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n    pub const Fp: Regs = Regs::X29;\n    pub const Lr: Regs = Regs::X30;\n}\n\n/// Return an ARM64 ArchCapstoneBuilder\npub fn capstone() -> capstone::arch::arm64::ArchCapstoneBuilder {\n    capstone::Capstone::new()\n        .arm64()\n        .mode(capstone::arch::arm64::ArchMode::Arm)\n}\n\npub type GuestReg = u64;\n\nimpl crate::ArchExtras for crate::CPU {\n    fn read_return_address<T>(&self) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        self.read_reg(Regs::Lr)\n    }\n\n    fn write_return_address<T>(&self, val: T) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        self.write_reg(Regs::Lr, val)\n    }\n\n    fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let reg_id = match idx {\n            0 => Regs::X0,\n            1 => Regs::X1,\n            2 => Regs::X2,\n            3 => Regs::X3,\n            4 => Regs::X4,\n            5 => Regs::X5,\n            r => return Err(format!(\"Unsupported argument: {r:}\")),\n        };\n\n        self.read_reg(reg_id)\n    }\n\n    fn write_function_argument<T>(\n        &self,\n        conv: CallingConvention,\n        idx: i32,\n        val: T,\n    ) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let val: GuestReg = val.into();\n        match idx {\n            0 => self.write_reg(Regs::X0, val),\n            1 => self.write_reg(Regs::X1, val),\n            _ => Err(format!(\"Unsupported argument: {idx:}\")),\n        }\n    }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl_qemu/src/aarch64.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 0, "content": "use std::sync::OnceLock;\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::aarch64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n    X0 = 0,\n    X1 = 1,\n    X2 = 2,\n    X3 = 3,\n    X4 = 4,\n    X5 = 5,\n    X6 = 6,\n    X7 = 7,\n    X8 = 8,\n    X9 = 9,\n    X10 = 10,\n    X11 = 11,\n    X12 = 12,\n    X13 = 13,\n    X14 = 14,\n    X15 = 15,\n    X16 = 16,\n    X17 = 17,\n    X18 = 18,\n    X19 = 19,\n    X20 = 20,\n    X21 = 21,\n    X22 = 22,\n    X23 = 23,\n    X24 = 24,\n    X25 = 25,\n    X26 = 26,\n    X27 = 27,\n    X28 = 28,\n    X29 = 29,\n    X30 = 30,\n    Sp = 31,\n    Pc = 32,\n    Pstate = 33,\n}\n\n", "meta": {"hash_id": "2a99dc5ec2948319ae2b833cd5981a885ede825d48f8760d2af6fc7c7d593af3"}}, {"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 1, "content": "static BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\npub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n    BACKDOOR_ARCH_REGS.get_or_init(|| {\n        enum_map! {\n            BackdoorArgs::Ret => Regs::X0,\n            BackdoorArgs::Cmd => Regs::X0,\n            BackdoorArgs::Arg1 => Regs::X1,\n            BackdoorArgs::Arg2 => Regs::X2,\n            BackdoorArgs::Arg3 => Regs::X3,\n            BackdoorArgs::Arg4 => Regs::X4,\n            BackdoorArgs::Arg5 => Regs::X5,\n            BackdoorArgs::Arg6 => Regs::X6,\n        }\n    })\n}\n\n", "meta": {"hash_id": "70615d2977ac97ee608bb4dc41d296a2acee96573b19b80d4ffc3f6d33bea137"}}, {"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 2, "content": "/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n    pub const Fp: Regs = Regs::X29;\n    pub const Lr: Regs = Regs::X30;\n}\n\n/// Return an ARM64 ArchCapstoneBuilder\npub fn capstone() -> capstone::arch::arm64::ArchCapstoneBuilder {\n    capstone::Capstone::new()\n        .arm64()\n        .mode(capstone::arch::arm64::ArchMode::Arm)\n}\n\npub type GuestReg = u64;\n\nimpl crate::ArchExtras for crate::CPU {\n    fn read_return_address<T>(&self) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        self.read_reg(Regs::Lr)\n    }\n\n    fn write_return_address<T>(&self, val: T) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        self.write_reg(Regs::Lr, val)\n    }\n\n", "meta": {"hash_id": "3e41f90e0745788f6227903e3734813d408a518e543d1105237c4edaad0d568a"}}, {"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 3, "content": "    fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let reg_id = match idx {\n            0 => Regs::X0,\n            1 => Regs::X1,\n            2 => Regs::X2,\n            3 => Regs::X3,\n            4 => Regs::X4,\n            5 => Regs::X5,\n            r => return Err(format!(\"Unsupported argument: {r:}\")),\n        };\n\n        self.read_reg(reg_id)\n    }\n\n", "meta": {"hash_id": "156aa4935b8998341427f5aba1997c02f663ebeb42219ff6ed8b7eec1931b32f"}}, {"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 4, "content": "    fn write_function_argument<T>(\n        &self,\n        conv: CallingConvention,\n        idx: i32,\n        val: T,\n    ) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let val: GuestReg = val.into();\n        match idx {\n            0 => self.write_reg(Regs::X0, val),\n            1 => self.write_reg(Regs::X1, val),\n            _ => Err(format!(\"Unsupported argument: {idx:}\")),\n        }\n    }\n}\n", "meta": {"hash_id": "27b5cb2a32111638cfa758256b3e4ab4535ece4a26d6088f57c2e1852fb7d43a"}}]}], "golden_chunks": [{"doc_uuid": "a72987c2673fe8ea07415380a869075c4e492ac6323ef0d1126bec73e8d4319f", "index": 1, "content": "static BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\npub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n    BACKDOOR_ARCH_REGS.get_or_init(|| {\n        enum_map! {\n            BackdoorArgs::Ret => Regs::X0,\n            BackdoorArgs::Cmd => Regs::X0,\n            BackdoorArgs::Arg1 => Regs::X1,\n            BackdoorArgs::Arg2 => Regs::X2,\n            BackdoorArgs::Arg3 => Regs::X3,\n            BackdoorArgs::Arg4 => Regs::X4,\n            BackdoorArgs::Arg5 => Regs::X5,\n            BackdoorArgs::Arg6 => Regs::X6,\n        }\n    })\n}\n\n", "meta": {"hash_id": "70615d2977ac97ee608bb4dc41d296a2acee96573b19b80d4ffc3f6d33bea137"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl_qemu/src/aarch64.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "How do I get the `EnumMap` of backdoor architecture registers?", "answer": "To get the `EnumMap` of backdoor architecture registers, you can use the `get_backdoor_arch_regs` function. 
This function is defined as follows:\n\n```rust\npub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n    BACKDOOR_ARCH_REGS.get_or_init(|| {\n        enum_map! {\n            BackdoorArgs::Ret => Regs::Rax,\n            BackdoorArgs::Cmd => Regs::Rax,\n            BackdoorArgs::Arg1 => Regs::Rdi,\n            BackdoorArgs::Arg2 => Regs::Rsi,\n            BackdoorArgs::Arg3 => Regs::Rdx,\n            BackdoorArgs::Arg4 => Regs::R10,\n            BackdoorArgs::Arg5 => Regs::R8,\n            BackdoorArgs::Arg6 => Regs::R9,\n        }\n    })\n}\n```\n\nThe `get_backdoor_arch_regs` function uses the `OnceLock` type to lazily initialize the `EnumMap` on the first call and return a reference to it. The `EnumMap` maps the `BackdoorArgs` enum to the corresponding `Regs` enum values, representing the registers used for different backdoor arguments.", "golden_doc_uuids": ["86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748"], "golden_chunk_uuids": [["86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748", 0]], "golden_documents": [{"uuid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748", "content": "use std::{mem::size_of, sync::OnceLock};\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::x86_64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n    Rax = 0,\n    Rbx = 1,\n    Rcx = 2,\n    Rdx = 3,\n    Rsi = 4,\n    Rdi = 5,\n    Rbp = 6,\n    Rsp = 7,\n    R8 = 8,\n    R9 = 9,\n    R10 = 10,\n    R11 = 11,\n    R12 = 12,\n    R13 = 13,\n    R14 = 14,\n    R15 = 15,\n    Rip = 16,\n    Rflags = 17,\n}\n\nstatic BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\npub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n    BACKDOOR_ARCH_REGS.get_or_init(|| {\n        enum_map! 
{\n            BackdoorArgs::Ret => Regs::Rax,\n            BackdoorArgs::Cmd => Regs::Rax,\n            BackdoorArgs::Arg1 => Regs::Rdi,\n            BackdoorArgs::Arg2 => Regs::Rsi,\n            BackdoorArgs::Arg3 => Regs::Rdx,\n            BackdoorArgs::Arg4 => Regs::R10,\n            BackdoorArgs::Arg5 => Regs::R8,\n            BackdoorArgs::Arg6 => Regs::R9,\n        }\n    })\n}\n\n/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n    pub const Sp: Regs = Regs::Rsp;\n    pub const Pc: Regs = Regs::Rip;\n}\n\n/// Return an X86 `ArchCapstoneBuilder`\n#[must_use]\npub fn capstone() -> capstone::arch::x86::ArchCapstoneBuilder {\n    capstone::Capstone::new()\n        .x86()\n        .mode(capstone::arch::x86::ArchMode::Mode64)\n}\n\npub type GuestReg = u64;\n\nimpl crate::ArchExtras for crate::CPU {\n    fn read_return_address<T>(&self) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n        let mut ret_addr = [0; size_of::<GuestReg>()];\n        unsafe { self.read_mem(stack_ptr, &mut ret_addr) };\n        Ok(GuestReg::from_le_bytes(ret_addr).into())\n    }\n\n    fn write_return_address<T>(&self, val: T) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n        let val: GuestReg = val.into();\n        let ret_addr = val.to_le_bytes();\n        unsafe { self.write_mem(stack_ptr, &ret_addr) };\n        Ok(())\n    }\n\n    fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let reg_id = match idx {\n            0 => Regs::Rdi,\n            1 => Regs::Rsi,\n            2 => Regs::Rdx,\n            3 => Regs::Rcx,\n            4 => Regs::R8,\n            5 => Regs::R9,\n            r => return Err(format!(\"Unsupported argument: {r:}\")),\n        };\n\n        self.read_reg(reg_id)\n    }\n\n    fn write_function_argument<T>(\n        &self,\n        conv: CallingConvention,\n        idx: i32,\n        val: T,\n    ) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let val: GuestReg = val.into();\n        match idx {\n            0 => self.write_reg(Regs::Rdi, val),\n            1 => self.write_reg(Regs::Rsi, val),\n            _ => Err(format!(\"Unsupported argument: {idx:}\")),\n        }\n    }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl_qemu/src/x86_64.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748", "index": 0, "content": "use std::{mem::size_of, sync::OnceLock};\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::x86_64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n    Rax = 0,\n    Rbx = 1,\n    Rcx = 2,\n    Rdx = 3,\n    Rsi = 4,\n    Rdi = 5,\n    Rbp = 6,\n    Rsp = 7,\n    R8 = 8,\n    R9 = 9,\n    R10 = 10,\n    R11 = 11,\n    R12 = 12,\n    R13 = 13,\n    R14 = 14,\n    R15 = 15,\n    Rip = 16,\n    Rflags = 17,\n}\n\nstatic BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\n", "meta": {"hash_id": "ba344baa82e01ee5d702301a09402ca83c9c5a6e9310a4d647d06ce1bda79219"}}, {"doc_uuid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748", "index": 1, "content": "pub fn get_backdoor_arch_regs() -> &'static EnumMap<BackdoorArgs, Regs> {\n    BACKDOOR_ARCH_REGS.get_or_init(|| {\n        enum_map! 
{\n            BackdoorArgs::Ret => Regs::Rax,\n            BackdoorArgs::Cmd => Regs::Rax,\n            BackdoorArgs::Arg1 => Regs::Rdi,\n            BackdoorArgs::Arg2 => Regs::Rsi,\n            BackdoorArgs::Arg3 => Regs::Rdx,\n            BackdoorArgs::Arg4 => Regs::R10,\n            BackdoorArgs::Arg5 => Regs::R8,\n            BackdoorArgs::Arg6 => Regs::R9,\n        }\n    })\n}\n\n/// alias registers\n#[allow(non_upper_case_globals)]\nimpl Regs {\n    pub const Sp: Regs = Regs::Rsp;\n    pub const Pc: Regs = Regs::Rip;\n}\n\n/// Return an X86 `ArchCapstoneBuilder`\n#[must_use]\npub fn capstone() -> capstone::arch::x86::ArchCapstoneBuilder {\n    capstone::Capstone::new()\n        .x86()\n        .mode(capstone::arch::x86::ArchMode::Mode64)\n}\n\npub type GuestReg = u64;\n\n", "meta": {"hash_id": "b4ba3c23e8049d2855d96bcb31ebcfb4c35023d74ad02f3eda5a92f8a6f57427"}}, {"doc_uuid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748", "index": 2, "content": "impl crate::ArchExtras for crate::CPU {\n    fn read_return_address<T>(&self) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n        let mut ret_addr = [0; size_of::<GuestReg>()];\n        unsafe { self.read_mem(stack_ptr, &mut ret_addr) };\n        Ok(GuestReg::from_le_bytes(ret_addr).into())\n    }\n\n    fn write_return_address<T>(&self, val: T) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        let stack_ptr: GuestReg = self.read_reg(Regs::Rsp)?;\n        let val: GuestReg = val.into();\n        let ret_addr = val.to_le_bytes();\n        unsafe { self.write_mem(stack_ptr, &ret_addr) };\n        Ok(())\n    }\n\n", "meta": {"hash_id": "fca9e514ce83380c64040e1de47c7d2e588fdce6593d51c459e17c2b24924284"}}, {"doc_uuid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748", "index": 3, "content": "    fn read_function_argument<T>(&self, conv: CallingConvention, idx: u8) -> Result<T, String>\n    where\n        T: From<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let reg_id = match idx {\n            0 => Regs::Rdi,\n            1 => Regs::Rsi,\n            2 => Regs::Rdx,\n            3 => Regs::Rcx,\n            4 => Regs::R8,\n            5 => Regs::R9,\n            r => return Err(format!(\"Unsupported argument: {r:}\")),\n        };\n\n        self.read_reg(reg_id)\n    }\n\n", "meta": {"hash_id": "d272f37ed2c31a36dfe32f4b836def16a226eced9c1e831943d8abab56c1017f"}}, {"doc_uuid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748", "index": 4, "content": "    fn write_function_argument<T>(\n        &self,\n        conv: CallingConvention,\n        idx: i32,\n        val: T,\n    ) -> Result<(), String>\n    where\n        T: Into<GuestReg>,\n    {\n        if conv != CallingConvention::Cdecl {\n            return Err(format!(\"Unsupported calling convention: {conv:#?}\"));\n        }\n\n        let val: GuestReg = val.into();\n        match idx {\n            0 => self.write_reg(Regs::Rdi, val),\n            1 => self.write_reg(Regs::Rsi, val),\n            _ => Err(format!(\"Unsupported argument: {idx:}\")),\n        }\n    }\n}\n", "meta": {"hash_id": "d2af43b161551eeb25342bc831785d240519bc9fb1be74d5676e0b21e788cb95"}}]}], "golden_chunks": [{"doc_uuid": "86e39b19ca47c979baa00968bc37f96da0b379d1e2a30e8407738bdce8e98748", "index": 0, "content": "use std::{mem::size_of, sync::OnceLock};\n\nuse capstone::arch::BuildsCapstone;\nuse enum_map::{enum_map, EnumMap};\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\npub use strum_macros::EnumIter;\npub use syscall_numbers::x86_64::*;\n\nuse crate::{sync_backdoor::BackdoorArgs, CallingConvention};\n\n#[derive(IntoPrimitive, TryFromPrimitive, Debug, Clone, Copy, EnumIter)]\n#[repr(i32)]\npub enum Regs {\n    Rax = 0,\n    Rbx = 1,\n    Rcx = 2,\n    Rdx = 3,\n    Rsi = 4,\n    Rdi = 5,\n    Rbp = 6,\n    Rsp = 7,\n    R8 = 8,\n    R9 = 9,\n    R10 = 10,\n    R11 = 11,\n    R12 = 12,\n    R13 = 13,\n    R14 = 14,\n    R15 = 15,\n    Rip = 
16,\n    Rflags = 17,\n}\n\nstatic BACKDOOR_ARCH_REGS: OnceLock<EnumMap<BackdoorArgs, Regs>> = OnceLock::new();\n\n", "meta": {"hash_id": "ba344baa82e01ee5d702301a09402ca83c9c5a6e9310a4d647d06ce1bda79219"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl_qemu/src/x86_64.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "How do you convert a `NautilusInput` to a `BytesInput`?", "answer": "You can use the `NautilusToBytesInputConverter` to convert a `NautilusInput` to a `BytesInput`. First, create an instance of `NautilusToBytesInputConverter` with a `NautilusContext`:\n```rust\nlet converter = NautilusToBytesInputConverter::new(&context);\n```\nThen, call the `convert` method on the converter, passing in the `NautilusInput`:\n```rust\nlet bytes_input = converter.convert(nautilus_input)?;\n```\nThe `convert` method internally calls the `unparse` method of `NautilusInput` to convert the input to bytes:\n```rust\nfn convert(&mut self, input: Self::From) -> Result<Self::To, Error> {\n    let mut bytes = vec![];\n    input.unparse(self.ctx, &mut bytes);\n    Ok(BytesInput::new(bytes))\n}\n```", "golden_doc_uuids": ["f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642"], "golden_chunk_uuids": [["f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", 4]], "golden_documents": [{"uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "content": "//! Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods\n//!\n\n//use ahash::AHasher;\n//use core::hash::Hasher;\n\nuse alloc::{rc::Rc, string::String, vec::Vec};\nuse core::cell::RefCell;\nuse std::hash::{Hash, Hasher};\n\nuse grammartec::{\n    newtypes::NodeID,\n    rule::RuleIDOrCustom,\n    tree::{Tree, TreeLike},\n};\nuse libafl_bolts::HasLen;\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n    generators::nautilus::NautilusContext,\n    inputs::{BytesInput, Input, InputConverter},\n    Error,\n};\n\n/// An [`Input`] implementation for `Nautilus` grammar.\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct NautilusInput {\n    /// The input representation as Tree\n    pub tree: Tree,\n}\n\nimpl Input for NautilusInput {\n    /// Generate a name for this input\n    #[must_use]\n    fn generate_name(&self, idx: usize) -> String {\n        /*let mut hasher = AHasher::new_with_keys(0, 0);\n        for term in &self.terms {\n            hasher.write(term.symbol.as_bytes());\n        }\n        format!(\"{:016x}\", hasher.finish())*/\n        format!(\"id:{idx}\")\n    }\n}\n\n/// Rc Ref-cell from Input\nimpl From<NautilusInput> for Rc<RefCell<NautilusInput>> {\n    fn from(input: NautilusInput) -> Self {\n        Rc::new(RefCell::new(input))\n    }\n}\n\nimpl HasLen for NautilusInput {\n    #[inline]\n    fn len(&self) -> usize {\n        self.tree.size()\n    }\n}\n\nimpl NautilusInput {\n    /// Creates a new codes input using the given terminals\n    #[must_use]\n    pub fn new(tree: Tree) -> Self {\n        Self { tree }\n    }\n\n    /// Create an empty [`Input`]\n    #[must_use]\n    pub fn empty() -> Self {\n        Self {\n            tree: Tree {\n                rules: vec![],\n                sizes: vec![],\n                paren: vec![],\n            },\n        }\n    }\n\n    /// Generate a `Nautilus` input from the given bytes\n    pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec<u8>) {\n        bytes.clear();\n        self.tree.unparse(NodeID::from(0), &context.ctx, bytes);\n    }\n\n    /// Get the tree representation of this input\n    #[must_use]\n    pub fn tree(&self) -> &Tree {\n        &self.tree\n    }\n\n    /// Get the tree representation of this input, as a mutable reference\n    #[must_use]\n    pub fn tree_mut(&mut self) -> &mut Tree {\n        &mut self.tree\n    }\n}\n\nimpl Hash for NautilusInput {\n    fn hash<H: Hasher>(&self, state: &mut H) 
{\n self.tree().paren.hash(state);\n for r in &self.tree().rules {\n match r {\n RuleIDOrCustom::Custom(a, b) => {\n a.hash(state);\n b.hash(state);\n }\n RuleIDOrCustom::Rule(a) => a.hash(state),\n }\n }\n self.tree().sizes.hash(state);\n }\n}\n\n/// `InputConverter` to convert from `NautilusInput` to `BytesInput`\n#[derive(Debug)]\npub struct NautilusToBytesInputConverter<'a> {\n ctx: &'a NautilusContext,\n}\n\nimpl<'a> NautilusToBytesInputConverter<'a> {\n #[must_use]\n /// Create a new `NautilusToBytesInputConverter` from a context\n pub fn new(ctx: &'a NautilusContext) -> Self {\n Self { ctx }\n }\n}\n\nimpl<'a> InputConverter for NautilusToBytesInputConverter<'a> {\n type From = NautilusInput;\n type To = BytesInput;\n\n fn convert(&mut self, input: Self::From) -> Result {\n let mut bytes = vec![];\n input.unparse(self.ctx, &mut bytes);\n Ok(BytesInput::new(bytes))\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl/src/inputs/nautilus.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 0, "content": "//! Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods\n//!\n\n//use ahash::AHasher;\n//use core::hash::Hasher;\n\nuse alloc::{rc::Rc, string::String, vec::Vec};\nuse core::cell::RefCell;\nuse std::hash::{Hash, Hasher};\n\nuse grammartec::{\n newtypes::NodeID,\n rule::RuleIDOrCustom,\n tree::{Tree, TreeLike},\n};\nuse libafl_bolts::HasLen;\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n generators::nautilus::NautilusContext,\n inputs::{BytesInput, Input, InputConverter},\n Error,\n};\n\n/// An [`Input`] implementation for `Nautilus` grammar.\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct NautilusInput {\n /// The input representation as Tree\n pub tree: Tree,\n}\n\n", "meta": {"hash_id": "9a0b685f11360b72afc58fa663e43ac5718bfeee3b3cb7ceed49a23c66d15847"}}, {"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 1, "content": "impl Input for NautilusInput {\n /// Generate a name for this input\n #[must_use]\n fn generate_name(&self, idx: usize) -> String {\n /*let mut hasher = AHasher::new_with_keys(0, 0);\n for term in &self.terms {\n hasher.write(term.symbol.as_bytes());\n }\n format!(\"{:016x}\", hasher.finish())*/\n format!(\"id:{idx}\")\n }\n}\n\n/// Rc Ref-cell from Input\nimpl From for Rc> {\n fn from(input: NautilusInput) -> Self {\n Rc::new(RefCell::new(input))\n }\n}\n\nimpl HasLen for NautilusInput {\n #[inline]\n fn len(&self) -> usize {\n self.tree.size()\n }\n}\n\nimpl NautilusInput {\n /// Creates a new codes input using the given terminals\n #[must_use]\n pub fn new(tree: Tree) -> Self {\n Self { tree }\n }\n\n", "meta": {"hash_id": "76d76f407600615137bfc8aa7c237c464cdf9a9bfa25a69d6fe5f178b8b1f085"}}, {"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 2, "content": " /// Create an empty [`Input`]\n #[must_use]\n pub fn empty() -> Self {\n Self {\n tree: Tree {\n rules: vec![],\n sizes: vec![],\n paren: vec![],\n },\n }\n }\n\n /// Generate a `Nautilus` input from the given bytes\n pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec) {\n bytes.clear();\n self.tree.unparse(NodeID::from(0), &context.ctx, bytes);\n }\n\n", "meta": {"hash_id": "557cf518085e3a677ec7756d4a75832f4cb0ffe703934c24f2b851513bb81327"}}, {"doc_uuid": 
"f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 3, "content": " /// Get the tree representation of this input\n #[must_use]\n pub fn tree(&self) -> &Tree {\n &self.tree\n }\n\n /// Get the tree representation of this input, as a mutable reference\n #[must_use]\n pub fn tree_mut(&mut self) -> &mut Tree {\n &mut self.tree\n }\n}\n\nimpl Hash for NautilusInput {\n fn hash(&self, state: &mut H) {\n self.tree().paren.hash(state);\n for r in &self.tree().rules {\n match r {\n RuleIDOrCustom::Custom(a, b) => {\n a.hash(state);\n b.hash(state);\n }\n RuleIDOrCustom::Rule(a) => a.hash(state),\n }\n }\n self.tree().sizes.hash(state);\n }\n}\n\n", "meta": {"hash_id": "67d8e90e8b20338f7e7a875dc5fe6b87b9b8491fb7b29151b48c56fd3987c6b6"}}, {"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 4, "content": "/// `InputConverter` to convert from `NautilusInput` to `BytesInput`\n#[derive(Debug)]\npub struct NautilusToBytesInputConverter<'a> {\n ctx: &'a NautilusContext,\n}\n\nimpl<'a> NautilusToBytesInputConverter<'a> {\n #[must_use]\n /// Create a new `NautilusToBytesInputConverter` from a context\n pub fn new(ctx: &'a NautilusContext) -> Self {\n Self { ctx }\n }\n}\n\nimpl<'a> InputConverter for NautilusToBytesInputConverter<'a> {\n type From = NautilusInput;\n type To = BytesInput;\n\n fn convert(&mut self, input: Self::From) -> Result {\n let mut bytes = vec![];\n input.unparse(self.ctx, &mut bytes);\n Ok(BytesInput::new(bytes))\n }\n}\n", "meta": {"hash_id": "3b345af60e10608c8807f2e9e059d6e618458fe74f5845ab0fd7149d1597c582"}}]}], "golden_chunks": [{"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 4, "content": "/// `InputConverter` to convert from `NautilusInput` to `BytesInput`\n#[derive(Debug)]\npub struct NautilusToBytesInputConverter<'a> {\n ctx: &'a NautilusContext,\n}\n\nimpl<'a> NautilusToBytesInputConverter<'a> {\n #[must_use]\n /// Create a new `NautilusToBytesInputConverter` from a context\n pub fn new(ctx: &'a NautilusContext) -> Self {\n Self { ctx }\n }\n}\n\nimpl<'a> InputConverter for NautilusToBytesInputConverter<'a> {\n type From = NautilusInput;\n type To = BytesInput;\n\n fn convert(&mut self, input: Self::From) -> Result {\n let mut bytes = vec![];\n input.unparse(self.ctx, &mut bytes);\n Ok(BytesInput::new(bytes))\n }\n}\n", "meta": {"hash_id": "3b345af60e10608c8807f2e9e059d6e618458fe74f5845ab0fd7149d1597c582"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl/src/inputs/nautilus.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "How do you get the `Tree` representation of a `NautilusInput`?", "answer": "You can get the `Tree` representation of a `NautilusInput` using the `tree` method, which returns an immutable reference to the `Tree`:\n```rust\npub fn tree(&self) -> &Tree {\n &self.tree\n}\n```\nIf you need a mutable reference to the `Tree`, you can use the `tree_mut` method instead:\n```rust\npub fn tree_mut(&mut self) -> &mut Tree {\n &mut self.tree\n}\n```", "golden_doc_uuids": ["f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642"], "golden_chunk_uuids": [["f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", 0]], "golden_documents": [{"uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "content": "//! 
Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods\n//!\n\n//use ahash::AHasher;\n//use core::hash::Hasher;\n\nuse alloc::{rc::Rc, string::String, vec::Vec};\nuse core::cell::RefCell;\nuse std::hash::{Hash, Hasher};\n\nuse grammartec::{\n newtypes::NodeID,\n rule::RuleIDOrCustom,\n tree::{Tree, TreeLike},\n};\nuse libafl_bolts::HasLen;\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n generators::nautilus::NautilusContext,\n inputs::{BytesInput, Input, InputConverter},\n Error,\n};\n\n/// An [`Input`] implementation for `Nautilus` grammar.\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct NautilusInput {\n /// The input representation as Tree\n pub tree: Tree,\n}\n\nimpl Input for NautilusInput {\n /// Generate a name for this input\n #[must_use]\n fn generate_name(&self, idx: usize) -> String {\n /*let mut hasher = AHasher::new_with_keys(0, 0);\n for term in &self.terms {\n hasher.write(term.symbol.as_bytes());\n }\n format!(\"{:016x}\", hasher.finish())*/\n format!(\"id:{idx}\")\n }\n}\n\n/// Rc Ref-cell from Input\nimpl From<NautilusInput> for Rc<RefCell<NautilusInput>> {\n fn from(input: NautilusInput) -> Self {\n Rc::new(RefCell::new(input))\n }\n}\n\nimpl HasLen for NautilusInput {\n #[inline]\n fn len(&self) -> usize {\n self.tree.size()\n }\n}\n\nimpl NautilusInput {\n /// Creates a new codes input using the given terminals\n #[must_use]\n pub fn new(tree: Tree) -> Self {\n Self { tree }\n }\n\n /// Create an empty [`Input`]\n #[must_use]\n pub fn empty() -> Self {\n Self {\n tree: Tree {\n rules: vec![],\n sizes: vec![],\n paren: vec![],\n },\n }\n }\n\n /// Generate a `Nautilus` input from the given bytes\n pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec<u8>) {\n bytes.clear();\n self.tree.unparse(NodeID::from(0), &context.ctx, bytes);\n }\n\n /// Get the tree representation of this input\n #[must_use]\n pub fn tree(&self) -> &Tree {\n &self.tree\n }\n\n /// Get the tree representation of this input, as a mutable reference\n #[must_use]\n pub fn tree_mut(&mut self) -> &mut Tree {\n &mut self.tree\n }\n}\n\nimpl Hash for NautilusInput {\n fn hash<H: Hasher>(&self, state: &mut H) {\n self.tree().paren.hash(state);\n for r in &self.tree().rules {\n match r {\n RuleIDOrCustom::Custom(a, b) => {\n a.hash(state);\n b.hash(state);\n }\n RuleIDOrCustom::Rule(a) => a.hash(state),\n }\n }\n self.tree().sizes.hash(state);\n }\n}\n\n/// `InputConverter` to convert from `NautilusInput` to `BytesInput`\n#[derive(Debug)]\npub struct NautilusToBytesInputConverter<'a> {\n ctx: &'a NautilusContext,\n}\n\nimpl<'a> NautilusToBytesInputConverter<'a> {\n #[must_use]\n /// Create a new `NautilusToBytesInputConverter` from a context\n pub fn new(ctx: &'a NautilusContext) -> Self {\n Self { ctx }\n }\n}\n\nimpl<'a> InputConverter for NautilusToBytesInputConverter<'a> {\n type From = NautilusInput;\n type To = BytesInput;\n\n fn convert(&mut self, input: Self::From) -> Result<Self::To, Error> {\n let mut bytes = vec![];\n input.unparse(self.ctx, &mut bytes);\n Ok(BytesInput::new(bytes))\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl/src/inputs/nautilus.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 0, "content": "//! 
Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods\n//!\n\n//use ahash::AHasher;\n//use core::hash::Hasher;\n\nuse alloc::{rc::Rc, string::String, vec::Vec};\nuse core::cell::RefCell;\nuse std::hash::{Hash, Hasher};\n\nuse grammartec::{\n newtypes::NodeID,\n rule::RuleIDOrCustom,\n tree::{Tree, TreeLike},\n};\nuse libafl_bolts::HasLen;\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n generators::nautilus::NautilusContext,\n inputs::{BytesInput, Input, InputConverter},\n Error,\n};\n\n/// An [`Input`] implementation for `Nautilus` grammar.\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct NautilusInput {\n /// The input representation as Tree\n pub tree: Tree,\n}\n\n", "meta": {"hash_id": "9a0b685f11360b72afc58fa663e43ac5718bfeee3b3cb7ceed49a23c66d15847"}}, {"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 1, "content": "impl Input for NautilusInput {\n /// Generate a name for this input\n #[must_use]\n fn generate_name(&self, idx: usize) -> String {\n /*let mut hasher = AHasher::new_with_keys(0, 0);\n for term in &self.terms {\n hasher.write(term.symbol.as_bytes());\n }\n format!(\"{:016x}\", hasher.finish())*/\n format!(\"id:{idx}\")\n }\n}\n\n/// Rc Ref-cell from Input\nimpl From<NautilusInput> for Rc<RefCell<NautilusInput>> {\n fn from(input: NautilusInput) -> Self {\n Rc::new(RefCell::new(input))\n }\n}\n\nimpl HasLen for NautilusInput {\n #[inline]\n fn len(&self) -> usize {\n self.tree.size()\n }\n}\n\nimpl NautilusInput {\n /// Creates a new codes input using the given terminals\n #[must_use]\n pub fn new(tree: Tree) -> Self {\n Self { tree }\n }\n\n", "meta": {"hash_id": "76d76f407600615137bfc8aa7c237c464cdf9a9bfa25a69d6fe5f178b8b1f085"}}, {"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 2, "content": " /// Create an empty [`Input`]\n #[must_use]\n pub fn empty() -> Self {\n Self {\n tree: Tree {\n rules: vec![],\n sizes: vec![],\n paren: vec![],\n },\n }\n }\n\n /// Generate a `Nautilus` input from the given bytes\n pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec<u8>) {\n bytes.clear();\n self.tree.unparse(NodeID::from(0), &context.ctx, bytes);\n }\n\n", "meta": {"hash_id": "557cf518085e3a677ec7756d4a75832f4cb0ffe703934c24f2b851513bb81327"}}, {"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 3, "content": " /// Get the tree representation of this input\n #[must_use]\n pub fn tree(&self) -> &Tree {\n &self.tree\n }\n\n /// Get the tree representation of this input, as a mutable reference\n #[must_use]\n pub fn tree_mut(&mut self) -> &mut Tree {\n &mut self.tree\n }\n}\n\nimpl Hash for NautilusInput {\n fn hash<H: Hasher>(&self, state: &mut H) {\n self.tree().paren.hash(state);\n for r in &self.tree().rules {\n match r {\n RuleIDOrCustom::Custom(a, b) => {\n a.hash(state);\n b.hash(state);\n }\n RuleIDOrCustom::Rule(a) => a.hash(state),\n }\n }\n self.tree().sizes.hash(state);\n }\n}\n\n", "meta": {"hash_id": "67d8e90e8b20338f7e7a875dc5fe6b87b9b8491fb7b29151b48c56fd3987c6b6"}}, {"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 4, "content": "/// `InputConverter` to convert from `NautilusInput` to `BytesInput`\n#[derive(Debug)]\npub struct NautilusToBytesInputConverter<'a> {\n ctx: &'a NautilusContext,\n}\n\nimpl<'a> NautilusToBytesInputConverter<'a> {\n #[must_use]\n /// Create a new `NautilusToBytesInputConverter` from a context\n pub fn new(ctx: &'a NautilusContext) -> Self {\n Self { ctx }\n 
}\n}\n\nimpl<'a> InputConverter for NautilusToBytesInputConverter<'a> {\n type From = NautilusInput;\n type To = BytesInput;\n\n fn convert(&mut self, input: Self::From) -> Result<Self::To, Error> {\n let mut bytes = vec![];\n input.unparse(self.ctx, &mut bytes);\n Ok(BytesInput::new(bytes))\n }\n}\n", "meta": {"hash_id": "3b345af60e10608c8807f2e9e059d6e618458fe74f5845ab0fd7149d1597c582"}}]}], "golden_chunks": [{"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 0, "content": "//! Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods\n//!\n\n//use ahash::AHasher;\n//use core::hash::Hasher;\n\nuse alloc::{rc::Rc, string::String, vec::Vec};\nuse core::cell::RefCell;\nuse std::hash::{Hash, Hasher};\n\nuse grammartec::{\n newtypes::NodeID,\n rule::RuleIDOrCustom,\n tree::{Tree, TreeLike},\n};\nuse libafl_bolts::HasLen;\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n generators::nautilus::NautilusContext,\n inputs::{BytesInput, Input, InputConverter},\n Error,\n};\n\n/// An [`Input`] implementation for `Nautilus` grammar.\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct NautilusInput {\n /// The input representation as Tree\n pub tree: Tree,\n}\n\n", "meta": {"hash_id": "9a0b685f11360b72afc58fa663e43ac5718bfeee3b3cb7ceed49a23c66d15847"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl/src/inputs/nautilus.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "What traits does `NautilusInput` implement?", "answer": "The `NautilusInput` struct implements several traits:\n- `Input`: Allows `NautilusInput` to be used as an input type in the fuzzer.\n- `From<NautilusInput> for Rc<RefCell<NautilusInput>>`: Enables creating an `Rc<RefCell<NautilusInput>>` from a `NautilusInput`.\n- `HasLen`: Provides a `len` method that returns the size of the input's `Tree`.\n- `Hash`: Allows hashing of `NautilusInput` instances based on their `Tree` structure.", "golden_doc_uuids": ["f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642"], "golden_chunk_uuids": [["f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", 1]], "golden_documents": [{"uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "content": "//! 
Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods\n//!\n\n//use ahash::AHasher;\n//use core::hash::Hasher;\n\nuse alloc::{rc::Rc, string::String, vec::Vec};\nuse core::cell::RefCell;\nuse std::hash::{Hash, Hasher};\n\nuse grammartec::{\n newtypes::NodeID,\n rule::RuleIDOrCustom,\n tree::{Tree, TreeLike},\n};\nuse libafl_bolts::HasLen;\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n generators::nautilus::NautilusContext,\n inputs::{BytesInput, Input, InputConverter},\n Error,\n};\n\n/// An [`Input`] implementation for `Nautilus` grammar.\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct NautilusInput {\n /// The input representation as Tree\n pub tree: Tree,\n}\n\nimpl Input for NautilusInput {\n /// Generate a name for this input\n #[must_use]\n fn generate_name(&self, idx: usize) -> String {\n /*let mut hasher = AHasher::new_with_keys(0, 0);\n for term in &self.terms {\n hasher.write(term.symbol.as_bytes());\n }\n format!(\"{:016x}\", hasher.finish())*/\n format!(\"id:{idx}\")\n }\n}\n\n/// Rc Ref-cell from Input\nimpl From<NautilusInput> for Rc<RefCell<NautilusInput>> {\n fn from(input: NautilusInput) -> Self {\n Rc::new(RefCell::new(input))\n }\n}\n\nimpl HasLen for NautilusInput {\n #[inline]\n fn len(&self) -> usize {\n self.tree.size()\n }\n}\n\nimpl NautilusInput {\n /// Creates a new codes input using the given terminals\n #[must_use]\n pub fn new(tree: Tree) -> Self {\n Self { tree }\n }\n\n /// Create an empty [`Input`]\n #[must_use]\n pub fn empty() -> Self {\n Self {\n tree: Tree {\n rules: vec![],\n sizes: vec![],\n paren: vec![],\n },\n }\n }\n\n /// Generate a `Nautilus` input from the given bytes\n pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec<u8>) {\n bytes.clear();\n self.tree.unparse(NodeID::from(0), &context.ctx, bytes);\n }\n\n /// Get the tree representation of this input\n #[must_use]\n pub fn tree(&self) -> &Tree {\n &self.tree\n }\n\n /// Get the tree representation of this input, as a mutable reference\n #[must_use]\n pub fn tree_mut(&mut self) -> &mut Tree {\n &mut self.tree\n }\n}\n\nimpl Hash for NautilusInput {\n fn hash<H: Hasher>(&self, state: &mut H) {\n self.tree().paren.hash(state);\n for r in &self.tree().rules {\n match r {\n RuleIDOrCustom::Custom(a, b) => {\n a.hash(state);\n b.hash(state);\n }\n RuleIDOrCustom::Rule(a) => a.hash(state),\n }\n }\n self.tree().sizes.hash(state);\n }\n}\n\n/// `InputConverter` to convert from `NautilusInput` to `BytesInput`\n#[derive(Debug)]\npub struct NautilusToBytesInputConverter<'a> {\n ctx: &'a NautilusContext,\n}\n\nimpl<'a> NautilusToBytesInputConverter<'a> {\n #[must_use]\n /// Create a new `NautilusToBytesInputConverter` from a context\n pub fn new(ctx: &'a NautilusContext) -> Self {\n Self { ctx }\n }\n}\n\nimpl<'a> InputConverter for NautilusToBytesInputConverter<'a> {\n type From = NautilusInput;\n type To = BytesInput;\n\n fn convert(&mut self, input: Self::From) -> Result<Self::To, Error> {\n let mut bytes = vec![];\n input.unparse(self.ctx, &mut bytes);\n Ok(BytesInput::new(bytes))\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/libafl/src/inputs/nautilus.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 0, "content": "//! 
Input for the [`Nautilus`](https://github.com/RUB-SysSec/nautilus) grammar fuzzer methods\n//!\n\n//use ahash::AHasher;\n//use core::hash::Hasher;\n\nuse alloc::{rc::Rc, string::String, vec::Vec};\nuse core::cell::RefCell;\nuse std::hash::{Hash, Hasher};\n\nuse grammartec::{\n newtypes::NodeID,\n rule::RuleIDOrCustom,\n tree::{Tree, TreeLike},\n};\nuse libafl_bolts::HasLen;\nuse serde::{Deserialize, Serialize};\n\nuse crate::{\n generators::nautilus::NautilusContext,\n inputs::{BytesInput, Input, InputConverter},\n Error,\n};\n\n/// An [`Input`] implementation for `Nautilus` grammar.\n#[derive(Serialize, Deserialize, Clone, Debug)]\npub struct NautilusInput {\n /// The input representation as Tree\n pub tree: Tree,\n}\n\n", "meta": {"hash_id": "9a0b685f11360b72afc58fa663e43ac5718bfeee3b3cb7ceed49a23c66d15847"}}, {"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 1, "content": "impl Input for NautilusInput {\n /// Generate a name for this input\n #[must_use]\n fn generate_name(&self, idx: usize) -> String {\n /*let mut hasher = AHasher::new_with_keys(0, 0);\n for term in &self.terms {\n hasher.write(term.symbol.as_bytes());\n }\n format!(\"{:016x}\", hasher.finish())*/\n format!(\"id:{idx}\")\n }\n}\n\n/// Rc Ref-cell from Input\nimpl From<NautilusInput> for Rc<RefCell<NautilusInput>> {\n fn from(input: NautilusInput) -> Self {\n Rc::new(RefCell::new(input))\n }\n}\n\nimpl HasLen for NautilusInput {\n #[inline]\n fn len(&self) -> usize {\n self.tree.size()\n }\n}\n\nimpl NautilusInput {\n /// Creates a new codes input using the given terminals\n #[must_use]\n pub fn new(tree: Tree) -> Self {\n Self { tree }\n }\n\n", "meta": {"hash_id": "76d76f407600615137bfc8aa7c237c464cdf9a9bfa25a69d6fe5f178b8b1f085"}}, {"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 2, "content": " /// Create an empty [`Input`]\n #[must_use]\n pub fn empty() -> Self {\n Self {\n tree: Tree {\n rules: vec![],\n sizes: vec![],\n paren: vec![],\n },\n }\n }\n\n /// Generate a `Nautilus` input from the given bytes\n pub fn unparse(&self, context: &NautilusContext, bytes: &mut Vec<u8>) {\n bytes.clear();\n self.tree.unparse(NodeID::from(0), &context.ctx, bytes);\n }\n\n", "meta": {"hash_id": "557cf518085e3a677ec7756d4a75832f4cb0ffe703934c24f2b851513bb81327"}}, {"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 3, "content": " /// Get the tree representation of this input\n #[must_use]\n pub fn tree(&self) -> &Tree {\n &self.tree\n }\n\n /// Get the tree representation of this input, as a mutable reference\n #[must_use]\n pub fn tree_mut(&mut self) -> &mut Tree {\n &mut self.tree\n }\n}\n\nimpl Hash for NautilusInput {\n fn hash<H: Hasher>(&self, state: &mut H) {\n self.tree().paren.hash(state);\n for r in &self.tree().rules {\n match r {\n RuleIDOrCustom::Custom(a, b) => {\n a.hash(state);\n b.hash(state);\n }\n RuleIDOrCustom::Rule(a) => a.hash(state),\n }\n }\n self.tree().sizes.hash(state);\n }\n}\n\n", "meta": {"hash_id": "67d8e90e8b20338f7e7a875dc5fe6b87b9b8491fb7b29151b48c56fd3987c6b6"}}, {"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 4, "content": "/// `InputConverter` to convert from `NautilusInput` to `BytesInput`\n#[derive(Debug)]\npub struct NautilusToBytesInputConverter<'a> {\n ctx: &'a NautilusContext,\n}\n\nimpl<'a> NautilusToBytesInputConverter<'a> {\n #[must_use]\n /// Create a new `NautilusToBytesInputConverter` from a context\n pub fn new(ctx: &'a NautilusContext) -> Self {\n Self { ctx }\n 
}\n}\n\nimpl<'a> InputConverter for NautilusToBytesInputConverter<'a> {\n type From = NautilusInput;\n type To = BytesInput;\n\n fn convert(&mut self, input: Self::From) -> Result<Self::To, Error> {\n let mut bytes = vec![];\n input.unparse(self.ctx, &mut bytes);\n Ok(BytesInput::new(bytes))\n }\n}\n", "meta": {"hash_id": "3b345af60e10608c8807f2e9e059d6e618458fe74f5845ab0fd7149d1597c582"}}]}], "golden_chunks": [{"doc_uuid": "f53f33e3403059a8fa7d21b3037c33c6a797fa43c38bca3e16adedee20780642", "index": 1, "content": "impl Input for NautilusInput {\n /// Generate a name for this input\n #[must_use]\n fn generate_name(&self, idx: usize) -> String {\n /*let mut hasher = AHasher::new_with_keys(0, 0);\n for term in &self.terms {\n hasher.write(term.symbol.as_bytes());\n }\n format!(\"{:016x}\", hasher.finish())*/\n format!(\"id:{idx}\")\n }\n}\n\n/// Rc Ref-cell from Input\nimpl From<NautilusInput> for Rc<RefCell<NautilusInput>> {\n fn from(input: NautilusInput) -> Self {\n Rc::new(RefCell::new(input))\n }\n}\n\nimpl HasLen for NautilusInput {\n #[inline]\n fn len(&self) -> usize {\n self.tree.size()\n }\n}\n\nimpl NautilusInput {\n /// Creates a new codes input using the given terminals\n #[must_use]\n pub fn new(tree: Tree) -> Self {\n Self { tree }\n }\n\n", "meta": {"hash_id": "76d76f407600615137bfc8aa7c237c464cdf9a9bfa25a69d6fe5f178b8b1f085"}}], "meta": {"doctype": "codebase", "relative_path": "/libafl/src/inputs/nautilus.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "How do you initialize the logger?", "answer": "To initialize the logger, you need to call the Logger::init function and pass an instance that implements the LogArgs trait. This can be seen in the function signature: `pub fn init(args: &impl LogArgs) -> Result<()>`.", "golden_doc_uuids": ["17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2"], "golden_chunk_uuids": [["17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", 0]], "golden_documents": [{"uuid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", "content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/utils/gdb_qemu/gdb_qemu/src/logger.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", "index": 0, "content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n", "meta": {"hash_id": "27c8246abd815dc96eed8c997eb8d5dbf656beb900323878193e1b7fdeecad1d"}}]}], 
"golden_chunks": [{"doc_uuid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", "index": 0, "content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n", "meta": {"hash_id": "27c8246abd815dc96eed8c997eb8d5dbf656beb900323878193e1b7fdeecad1d"}}], "meta": {"doctype": "codebase", "relative_path": "/utils/gdb_qemu/gdb_qemu/src/logger.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "How is the log file created?", "answer": "The log file is created using `File::create(args.log_file())`. It takes the log file path obtained from `args.log_file()` and attempts to create the file. If the file creation fails, an error is returned using `map_err()` with a custom error message: `map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?`.", "golden_doc_uuids": ["17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2"], "golden_chunk_uuids": [["17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", 0]], "golden_documents": [{"uuid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", "content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/utils/gdb_qemu/gdb_qemu/src/logger.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", "index": 0, "content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n", "meta": {"hash_id": "27c8246abd815dc96eed8c997eb8d5dbf656beb900323878193e1b7fdeecad1d"}}]}], "golden_chunks": [{"doc_uuid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", "index": 0, "content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = 
File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n", "meta": {"hash_id": "27c8246abd815dc96eed8c997eb8d5dbf656beb900323878193e1b7fdeecad1d"}}], "meta": {"doctype": "codebase", "relative_path": "/utils/gdb_qemu/gdb_qemu/src/logger.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What logger implementation is being used?", "answer": "The code uses the `simplelog` crate for logging. Specifically, it uses the `WriteLogger` from `simplelog`, as shown in the line: `WriteLogger::init(filter, Config::default(), logfile)`.", "golden_doc_uuids": ["17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2"], "golden_chunk_uuids": [["17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", 0]], "golden_documents": [{"uuid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", "content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/utils/gdb_qemu/gdb_qemu/src/logger.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", "index": 0, "content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n", "meta": {"hash_id": "27c8246abd815dc96eed8c997eb8d5dbf656beb900323878193e1b7fdeecad1d"}}]}], "golden_chunks": [{"doc_uuid": "17f3b912090b0ab395e7ceed8c88c38cea8c99bc292e3d94feec7a7dcbdf3ee2", "index": 0, "content": "use {\n crate::args::LogArgs,\n anyhow::{anyhow, Result},\n simplelog::{Config, LevelFilter, WriteLogger},\n std::fs::File,\n};\n\npub struct Logger;\n\nimpl Logger {\n pub fn init(args: &impl LogArgs) -> Result<()> {\n let filter: LevelFilter = args.log_level().into();\n if filter != LevelFilter::Off {\n let logfile = File::create(args.log_file())\n .map_err(|e| anyhow!(\"Failed to open log file: {e:}\"))?;\n WriteLogger::init(filter, Config::default(), logfile)\n .map_err(|e| anyhow!(\"Failed to initalize logger: {e:}\"))?;\n }\n Ok(())\n }\n}\n", "meta": {"hash_id": "27c8246abd815dc96eed8c997eb8d5dbf656beb900323878193e1b7fdeecad1d"}}], "meta": {"doctype": "codebase", "relative_path": "/utils/gdb_qemu/gdb_qemu/src/logger.rs", "repo_name": "AFLplusplus/LibAFL", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "How do you register a new type in the Registry?", 
"answer": "To register a new type in the Registry, you can use either the `register` or `register_multi` methods. The `register` method is used for registering a single type, while `register_multi` is used for registering multiple types at once. For example:\n```python\nregistry.register(MyType)\n```\nor\n```python\n@registry.register_multi(arg1, arg2)\nclass MyType:\n pass\n```\nThe `_real_register` method is called internally to handle the actual registration process.", "golden_doc_uuids": ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4"], "golden_chunk_uuids": [["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 7]], "golden_documents": [{"uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n # Now handle the difference between register and 
register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/iface/_registry.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 0, "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . 
import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n", "meta": {"hash_id": "5fe3bf2b876b96eecceb4ff3af1993aa1d47ad9c37be52dfc857a4ed0a534c71"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 1, "content": " _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n", "meta": {"hash_id": "63a84d34317784c1b9ea8759ad11da6cebe36ea36f9eb263b11fa42d6a7a61b2"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 2, "content": " def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n", "meta": {"hash_id": "9f86c97a7e146234064b887b2ff9ef65f292e08f7af8a9245c3c6f6b8d9d506a"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 3, "content": " # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n", "meta": {"hash_id": "f1af0b19e143e875d6de799106ca91f803653572d78c31ac64ce99383b71e448"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 4, "content": " continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n", "meta": {"hash_id": "41a8a9686139a61470f987183ed8112a4e8752f8f7fc990b294e8bda42ac82a5"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 5, "content": " # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n", "meta": {"hash_id": 
"4307e5ad9914fd61209f6f900e7efd1b0ffe1eca4f1acc0f153f82357d3e4fb9"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 6, "content": " # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n", "meta": {"hash_id": "d35e73b3fb89033ae671f62331c350124f615852b9b1013b89bdebdd6a3456fa"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 7, "content": " self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n", "meta": {"hash_id": "132024e060f4231528f9daf27c4a6c8d4c1c7873c952f51c2e6665630cc32dda"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 8, "content": " target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n", "meta": {"hash_id": "9e160b9b0bc7878d8e9aabb60a1d12697dc351d0de17921b64bc0145c9c50c5d"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 9, "content": " def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", "meta": {"hash_id": "647edf38d304d853aaecbb164d62a98e5c2d592bde3e9eb3bdf05be864baff57"}}]}], "golden_chunks": [{"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 7, "content": " self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n 
def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n", "meta": {"hash_id": "132024e060f4231528f9daf27c4a6c8d4c1c7873c952f51c2e6665630cc32dda"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/iface/_registry.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `_real_register` method?", "answer": "The `_real_register` method is responsible for the actual registration of types in the Registry. It performs various checks and operations based on the input type and any additional arguments provided. It determines the module type and base class of the input type, handles special cases like `PolymorphicChecker`, and registers the type in the appropriate data structures within the Registry. It also updates the `_names` and `_targets` dictionaries to maintain associations between names, types, and targets.", "golden_doc_uuids": ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4"], "golden_chunk_uuids": [["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 2]], "golden_documents": [{"uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base 
classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/iface/_registry.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 0, "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . 
import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n", "meta": {"hash_id": "5fe3bf2b876b96eecceb4ff3af1993aa1d47ad9c37be52dfc857a4ed0a534c71"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 1, "content": " _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n", "meta": {"hash_id": "63a84d34317784c1b9ea8759ad11da6cebe36ea36f9eb263b11fa42d6a7a61b2"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 2, "content": " def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n", "meta": {"hash_id": "9f86c97a7e146234064b887b2ff9ef65f292e08f7af8a9245c3c6f6b8d9d506a"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 3, "content": " # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n", "meta": {"hash_id": "f1af0b19e143e875d6de799106ca91f803653572d78c31ac64ce99383b71e448"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 4, "content": " continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n", "meta": {"hash_id": "41a8a9686139a61470f987183ed8112a4e8752f8f7fc990b294e8bda42ac82a5"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 5, "content": " # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n", "meta": {"hash_id": 
"4307e5ad9914fd61209f6f900e7efd1b0ffe1eca4f1acc0f153f82357d3e4fb9"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 6, "content": " # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n", "meta": {"hash_id": "d35e73b3fb89033ae671f62331c350124f615852b9b1013b89bdebdd6a3456fa"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 7, "content": " self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n", "meta": {"hash_id": "132024e060f4231528f9daf27c4a6c8d4c1c7873c952f51c2e6665630cc32dda"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 8, "content": " target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n", "meta": {"hash_id": "9e160b9b0bc7878d8e9aabb60a1d12697dc351d0de17921b64bc0145c9c50c5d"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 9, "content": " def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", "meta": {"hash_id": "647edf38d304d853aaecbb164d62a98e5c2d592bde3e9eb3bdf05be864baff57"}}]}], "golden_chunks": [{"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 2, "content": " def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = 
Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n", "meta": {"hash_id": "9f86c97a7e146234064b887b2ff9ef65f292e08f7af8a9245c3c6f6b8d9d506a"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/iface/_registry.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "How can you retrieve registered types from the Registry?", "answer": "The Registry provides several methods to retrieve registered types:\n- The `__getitem__` method allows you to retrieve types based on a specific type or generic type. For example:\n ```python\n registry[MyType]\n registry[Checker[int]]\n ```\n- The `get_named` method allows you to retrieve a type by its name. For example:\n ```python\n registry.get_named(\"mytype\")\n ```\n- The `get_targeted` method allows you to retrieve types associated with a specific target. For example:\n ```python\n registry.get_targeted(\"mytarget\")\n ```\n- The `get_all_names` method returns a list of all registered type names.", "golden_doc_uuids": ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4"], "golden_chunk_uuids": [["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 6], ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 5], ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 4], ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 3], ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 2], ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 1], ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 0]], "golden_documents": [{"uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . 
import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n 
return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/iface/_registry.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 0, "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n", "meta": {"hash_id": "5fe3bf2b876b96eecceb4ff3af1993aa1d47ad9c37be52dfc857a4ed0a534c71"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 1, "content": " _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n", "meta": {"hash_id": "63a84d34317784c1b9ea8759ad11da6cebe36ea36f9eb263b11fa42d6a7a61b2"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 2, "content": " def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n", "meta": {"hash_id": "9f86c97a7e146234064b887b2ff9ef65f292e08f7af8a9245c3c6f6b8d9d506a"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 3, 
"content": " # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n", "meta": {"hash_id": "f1af0b19e143e875d6de799106ca91f803653572d78c31ac64ce99383b71e448"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 4, "content": " continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n", "meta": {"hash_id": "41a8a9686139a61470f987183ed8112a4e8752f8f7fc990b294e8bda42ac82a5"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 5, "content": " # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n", "meta": {"hash_id": "4307e5ad9914fd61209f6f900e7efd1b0ffe1eca4f1acc0f153f82357d3e4fb9"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 6, "content": " # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n", "meta": {"hash_id": "d35e73b3fb89033ae671f62331c350124f615852b9b1013b89bdebdd6a3456fa"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 7, "content": " self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n", "meta": {"hash_id": "132024e060f4231528f9daf27c4a6c8d4c1c7873c952f51c2e6665630cc32dda"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 8, "content": " 
target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n", "meta": {"hash_id": "9e160b9b0bc7878d8e9aabb60a1d12697dc351d0de17921b64bc0145c9c50c5d"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 9, "content": " def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", "meta": {"hash_id": "647edf38d304d853aaecbb164d62a98e5c2d592bde3e9eb3bdf05be864baff57"}}]}], "golden_chunks": [{"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 6, "content": " # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n", "meta": {"hash_id": "d35e73b3fb89033ae671f62331c350124f615852b9b1013b89bdebdd6a3456fa"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 5, "content": " # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n", "meta": {"hash_id": "4307e5ad9914fd61209f6f900e7efd1b0ffe1eca4f1acc0f153f82357d3e4fb9"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 4, "content": " continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n", "meta": {"hash_id": "41a8a9686139a61470f987183ed8112a4e8752f8f7fc990b294e8bda42ac82a5"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 3, "content": " # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from 
multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n", "meta": {"hash_id": "f1af0b19e143e875d6de799106ca91f803653572d78c31ac64ce99383b71e448"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 2, "content": " def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n", "meta": {"hash_id": "9f86c97a7e146234064b887b2ff9ef65f292e08f7af8a9245c3c6f6b8d9d506a"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 1, "content": " _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n", "meta": {"hash_id": "63a84d34317784c1b9ea8759ad11da6cebe36ea36f9eb263b11fa42d6a7a61b2"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 0, "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n", "meta": {"hash_id": "5fe3bf2b876b96eecceb4ff3af1993aa1d47ad9c37be52dfc857a4ed0a534c71"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/iface/_registry.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "How are targeted types handled in the Registry?", "answer": "Targeted types, which are types that inherit from the `Targeted` class, are handled specially in the Registry. When a targeted type is registered, the Registry stores the association between the target and the module type in the `_targets` dictionary. 
For example:\n```python\nif target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n```\nThis allows for retrieval of targeted types using the `get_targeted` method based on the target and an optional type constraint.", "golden_doc_uuids": ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4"], "golden_chunk_uuids": [["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 7]], "golden_documents": [{"uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type 
argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/iface/_registry.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 0, "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . 
import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n", "meta": {"hash_id": "5fe3bf2b876b96eecceb4ff3af1993aa1d47ad9c37be52dfc857a4ed0a534c71"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 1, "content": " _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n", "meta": {"hash_id": "63a84d34317784c1b9ea8759ad11da6cebe36ea36f9eb263b11fa42d6a7a61b2"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 2, "content": " def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n", "meta": {"hash_id": "9f86c97a7e146234064b887b2ff9ef65f292e08f7af8a9245c3c6f6b8d9d506a"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 3, "content": " # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n", "meta": {"hash_id": "f1af0b19e143e875d6de799106ca91f803653572d78c31ac64ce99383b71e448"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 4, "content": " continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n", "meta": {"hash_id": "41a8a9686139a61470f987183ed8112a4e8752f8f7fc990b294e8bda42ac82a5"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 5, "content": " # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n", "meta": {"hash_id": 
"4307e5ad9914fd61209f6f900e7efd1b0ffe1eca4f1acc0f153f82357d3e4fb9"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 6, "content": " # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n", "meta": {"hash_id": "d35e73b3fb89033ae671f62331c350124f615852b9b1013b89bdebdd6a3456fa"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 7, "content": " self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n", "meta": {"hash_id": "132024e060f4231528f9daf27c4a6c8d4c1c7873c952f51c2e6665630cc32dda"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 8, "content": " target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n", "meta": {"hash_id": "9e160b9b0bc7878d8e9aabb60a1d12697dc351d0de17921b64bc0145c9c50c5d"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 9, "content": " def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", "meta": {"hash_id": "647edf38d304d853aaecbb164d62a98e5c2d592bde3e9eb3bdf05be864baff57"}}]}], "golden_chunks": [{"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 7, "content": " self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n 
def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n", "meta": {"hash_id": "132024e060f4231528f9daf27c4a6c8d4c1c7873c952f51c2e6665630cc32dda"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/iface/_registry.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `_modules` set in the Registry?", "answer": "The `_modules` set in the Registry is used to store the base classes that are considered as registrable modules. It includes classes like `Checker`, `Cracker`, `Decoder`, `ResourceLoader`, `Searcher`, and `PolymorphicChecker`. This set is used to determine the module type of a registered type during the registration process.", "golden_doc_uuids": ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4"], "golden_chunk_uuids": [["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 2], ["db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", 1]], "golden_documents": [{"uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No 
registrable base class\")\n\n # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/iface/_registry.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 0, "content": "from typing import Any, Dict, List, Optional, Set, Tuple, Type, Union\n\ntry:\n from typing import get_args, get_origin\nexcept ImportError:\n from typing_inspect import get_origin, get_args\n\nfrom . 
import _fwd\nfrom ._modules import *\n\n\nclass Registry:\n # I was planning on using __init_subclass__, but that is incompatible with dynamic type creation when we have\n # generic keys\n\n RegElem = Union[List[Type], Dict[Type, \"RegElem\"]]\n\n", "meta": {"hash_id": "5fe3bf2b876b96eecceb4ff3af1993aa1d47ad9c37be52dfc857a4ed0a534c71"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 1, "content": " _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n", "meta": {"hash_id": "63a84d34317784c1b9ea8759ad11da6cebe36ea36f9eb263b11fa42d6a7a61b2"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 2, "content": " def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n", "meta": {"hash_id": "9f86c97a7e146234064b887b2ff9ef65f292e08f7af8a9245c3c6f6b8d9d506a"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 3, "content": " # Work out what module type this is\n if len(args) == 0 and hasattr(input_type, \"__orig_bases__\"):\n for i in input_type.__orig_bases__:\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_base = get_origin(i)\n if module_base not in self._modules:\n continue\n module_type = i\n else:\n for i in self._modules:\n if not issubclass(input_type, i):\n", "meta": {"hash_id": "f1af0b19e143e875d6de799106ca91f803653572d78c31ac64ce99383b71e448"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 4, "content": " continue\n if module_type is not None:\n raise TypeError(\n f\"Type derived from multiple registrable base classes {i} and {module_type}\"\n )\n module_type = i\n if module_type is None:\n raise TypeError(\"No registrable base class\")\n\n", "meta": {"hash_id": "41a8a9686139a61470f987183ed8112a4e8752f8f7fc990b294e8bda42ac82a5"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 5, "content": " # Replace input type with polymorphic checker if required\n if issubclass(input_type, Checker):\n if len(args) == 0:\n arg = [\n get_args(i)\n for i in input_type.__orig_bases__\n if get_origin(i) == Checker\n ][0]\n if len(arg) != 1:\n raise TypeError(\"No argument for Checker\")\n input_type = input_type.convert({arg[0]})\n else:\n input_type = input_type.convert(set(args))\n self._register_one(input_type, PolymorphicChecker, [])\n # Refresh the names with the new type\n name_target = self._names[name] = (input_type, {PolymorphicChecker})\n\n", "meta": {"hash_id": 
"4307e5ad9914fd61209f6f900e7efd1b0ffe1eca4f1acc0f153f82357d3e4fb9"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 6, "content": " # Now handle the difference between register and register_multi\n if len(args) == 0:\n if module_type is PolymorphicChecker:\n module_base = PolymorphicChecker\n elif module_base is None:\n raise TypeError(\"No type argument given\")\n self._register_one(input_type, module_base, get_args(module_type))\n name_target[1].add(module_base)\n else:\n if module_base is not None:\n raise TypeError(f\"Redundant type argument for {module_type}\")\n module_base = module_type\n for module_args in args:\n # Correct missing brackets\n if not isinstance(module_args, tuple):\n module_args = (module_args,)\n\n", "meta": {"hash_id": "d35e73b3fb89033ae671f62331c350124f615852b9b1013b89bdebdd6a3456fa"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 7, "content": " self._register_one(input_type, module_base, module_args)\n name_target[1].add(module_type[module_args])\n\n name_target[1].add(module_type)\n\n if target is not None and issubclass(module_base, Targeted):\n self._targets.setdefault(target, {}).setdefault(module_type, []).append(\n input_type\n )\n\n return input_type\n\n def register(self, input_type):\n return self._real_register(input_type)\n\n def register_multi(self, *x):\n return lambda input_type: self._real_register(input_type, *x)\n\n def __getitem__(self, i: type) -> Optional[Any]:\n target_type = get_origin(i)\n # Check if this is a non-generic type, and return the whole dict if it is\n if target_type is None:\n return self._reg[i]\n\n", "meta": {"hash_id": "132024e060f4231528f9daf27c4a6c8d4c1c7873c952f51c2e6665630cc32dda"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 8, "content": " target_subtypes = get_args(i)\n target_list = self._reg.setdefault(target_type, {})\n for subtype in target_subtypes:\n target_list = target_list.setdefault(subtype, {})\n return target_list\n\n def get_named(self, name: str, type_constraint: Type = None) -> Any:\n ret = self._names[name.lower()]\n if type_constraint and type_constraint not in ret[1]:\n raise TypeError(f\"Type mismatch: wanted {type_constraint}, got {ret[1]}\")\n return ret[0]\n\n", "meta": {"hash_id": "9e160b9b0bc7878d8e9aabb60a1d12697dc351d0de17921b64bc0145c9c50c5d"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 9, "content": " def get_targeted(\n self, target: str, type_constraint: Type = None\n ) -> Optional[Union[Dict[Type, Set[Type]], Set[Type]]]:\n x = self._targets.get(target)\n if x is None or type_constraint is None:\n return x\n return x.get(type_constraint)\n\n def get_all_names(self) -> List[str]:\n return list(self._names.keys())\n\n def __str__(self):\n return f\"ciphey.iface.Registry {{_reg: {self._reg}, _names: {self._names}, _targets: {self._targets}}}\"\n\n\n_fwd.registry = Registry()\n", "meta": {"hash_id": "647edf38d304d853aaecbb164d62a98e5c2d592bde3e9eb3bdf05be864baff57"}}]}], "golden_chunks": [{"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 2, "content": " def _real_register(self, input_type: type, *args) -> Type:\n name = input_type.__name__.lower()\n name_target = self._names[name] = (input_type, set())\n\n if issubclass(input_type, Targeted):\n target = input_type.getTarget()\n else:\n target = None\n\n if issubclass(input_type, Searcher):\n module_type = module_base = 
Searcher\n module_args = ()\n else:\n module_type: Optional[Type] = None\n module_base = None\n\n", "meta": {"hash_id": "9f86c97a7e146234064b887b2ff9ef65f292e08f7af8a9245c3c6f6b8d9d506a"}}, {"doc_uuid": "db4820f398227726bb49a455d49677d8b6cce93fd915b4632ce34ca39c1684f4", "index": 1, "content": " _reg: Dict[Type, RegElem] = {}\n _names: Dict[str, Tuple[Type, Set[Type]]] = {}\n _targets: Dict[str, Dict[Type, List[Type]]] = {}\n _modules = {Checker, Cracker, Decoder, ResourceLoader, Searcher, PolymorphicChecker}\n\n def _register_one(self, input_type, module_base, module_args):\n if len(module_args) == 0:\n self._reg.setdefault(module_base, []).append(input_type)\n return\n\n target_reg = self._reg.setdefault(module_base, {})\n # Seek to the given type\n for subtype in module_args[0:-1]:\n target_reg = target_reg.setdefault(subtype, {})\n target_reg.setdefault(module_args[-1], []).append(input_type)\n\n", "meta": {"hash_id": "63a84d34317784c1b9ea8759ad11da6cebe36ea36f9eb263b11fa42d6a7a61b2"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/iface/_registry.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "What does the Octal class do?", "answer": "The Octal class is a Decoder that performs octal decoding on a given ciphertext string, as stated in the docstring: \"Performs Octal decoding\". It takes a ciphertext input of type T and returns the decoded result of type U if decoding is successful, or None if decoding fails.", "golden_doc_uuids": ["ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947"], "golden_chunk_uuids": [["ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", 0]], "golden_documents": [{"uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/octal.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 0, "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, 
registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n", "meta": {"hash_id": "d591c99ea0ed1e245d2d01f5a3b397a90caabccb1a0c432dca631de3611a81c0"}}, {"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 1, "content": " return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n", "meta": {"hash_id": "0a79f780977d44df645f887b1f977950cbb54c0ff8bee1a6278a0fdf7e02fc72"}}]}], "golden_chunks": [{"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 0, "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n", "meta": {"hash_id": "d591c99ea0ed1e245d2d01f5a3b397a90caabccb1a0c432dca631de3611a81c0"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/octal.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How does the decode method of the Octal class work?", "answer": "The decode method first splits the input ctext string by spaces into a list of octal sequences (octal_seq). If there is only one sequence and its length is not divisible by 3, it returns None since valid octal must be formed of triplets. It then tries to convert each octal character in octal_seq to an integer using base 8. If any subsequence is too long (>3 chars) or contains non-octal characters, it returns None. Otherwise, it appends the converted integer to str_converted. 
Finally, it returns the bytes representation of str_converted.", "golden_doc_uuids": ["ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947"], "golden_chunk_uuids": [["ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", 0]], "golden_documents": [{"uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/octal.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 0, "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n", "meta": {"hash_id": "d591c99ea0ed1e245d2d01f5a3b397a90caabccb1a0c432dca631de3611a81c0"}}, {"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 1, "content": " return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n", "meta": {"hash_id": "0a79f780977d44df645f887b1f977950cbb54c0ff8bee1a6278a0fdf7e02fc72"}}]}], 
"golden_chunks": [{"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 0, "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n", "meta": {"hash_id": "d591c99ea0ed1e245d2d01f5a3b397a90caabccb1a0c432dca631de3611a81c0"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/octal.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What does the getTarget method of the Octal class do?", "answer": "The getTarget method simply returns the string \"octal\". Based on the @registry.register decorator used on the Octal class, this likely specifies the name or key used to look up this decoder in the Ciphey registry.", "golden_doc_uuids": ["ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947"], "golden_chunk_uuids": [["ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", 1]], "golden_documents": [{"uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/octal.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 0, "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal 
decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n", "meta": {"hash_id": "d591c99ea0ed1e245d2d01f5a3b397a90caabccb1a0c432dca631de3611a81c0"}}, {"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 1, "content": " return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n", "meta": {"hash_id": "0a79f780977d44df645f887b1f977950cbb54c0ff8bee1a6278a0fdf7e02fc72"}}]}], "golden_chunks": [{"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 1, "content": " return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n", "meta": {"hash_id": "0a79f780977d44df645f887b1f977950cbb54c0ff8bee1a6278a0fdf7e02fc72"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/octal.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What external dependencies does the Octal class have?", "answer": "The Octal class imports typing for type hinting with Dict, Optional, logging for debug logging, RichHandler from rich.logging likely for formatted logging output, and Config, Decoder, ParamSpec, T, U, registry from ciphey.iface which are part of the Ciphey framework. 
Although RichHandler is imported, the class does not appear to use it directly in the provided code; Config is used as the type of the constructor argument that is passed on to the base class.", "golden_doc_uuids": ["ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947"], "golden_chunk_uuids": [["ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", 0]], "golden_documents": [{"uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/octal.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 0, "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n", "meta": {"hash_id": "d591c99ea0ed1e245d2d01f5a3b397a90caabccb1a0c432dca631de3611a81c0"}}, {"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 1, "content": " return None\n n = int(octal_char, 8)\n if (\n n < 0\n ): # n cannot be greater than 255, as we checked that with the earlier length check\n logging.debug(f\"Non octal char {octal_char}\")\n return None\n str_converted.append(n)\n\n return bytes(str_converted)\n # Catch bad octal chars\n except ValueError:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.025\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"octal\"\n", "meta": {"hash_id": 
"0a79f780977d44df645f887b1f977950cbb54c0ff8bee1a6278a0fdf7e02fc72"}}]}], "golden_chunks": [{"doc_uuid": "ea58fee353c3cce2856ea3e5cba5cad31eb25bef27af6a2828000a99cca9d947", "index": 0, "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Octal(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Octal decoding\n \"\"\"\n str_converted = []\n octal_seq = ctext.split(\" \")\n if len(octal_seq) == 1:\n # Concatted octal must be formed of octal triplets\n if len(ctext) % 3 != 0:\n return None\n octal_seq = [ctext[i : i + 3] for i in range(0, len(ctext), 3)]\n logging.debug(f\"Trying chunked octal {octal_seq}\")\n try:\n for octal_char in octal_seq:\n if len(octal_char) > 3:\n logging.debug(\"Octal subseq too long\")\n", "meta": {"hash_id": "d591c99ea0ed1e245d2d01f5a3b397a90caabccb1a0c432dca631de3611a81c0"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/octal.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How does the decode method of the A1z26 class work?", "answer": "The decode method splits the input ciphertext on various delimiters like spaces, commas, semicolons, colons, hyphens, and newlines using a regular expression. It then checks if the resulting substrings are numeric. If any substring is not numeric or outside the range 1 to 26, it returns None indicating decoding failure. Otherwise, it converts each numeric substring to its corresponding lowercase letter (1=a, 2=b, ..., 26=z) and joins them to produce the decoded plaintext.", "golden_doc_uuids": ["3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b"], "golden_chunk_uuids": [["3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", 0]], "golden_documents": [{"uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/a1z26.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, 
"source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 0, "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n", "meta": {"hash_id": "9d62fb0f40dd510a76f6df5288625cd0d642f0b62c8e76f1fe33ed8eb63a5367"}}, {"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 1, "content": " f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"hash_id": "0971f506d218237fcc740b083fe519835888c910899e0f0ee4eb5e7a2d238832"}}]}], "golden_chunks": [{"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 0, "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n", "meta": {"hash_id": "9d62fb0f40dd510a76f6df5288625cd0d642f0b62c8e76f1fe33ed8eb63a5367"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/a1z26.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the priority method in the A1z26 class?", "answer": "The priority method returns a float value of 0.05. 
This likely indicates the priority or ordering in which this decoder will be tried relative to other decoders in the Ciphey framework.", "golden_doc_uuids": ["3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b"], "golden_chunk_uuids": [["3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", 1]], "golden_documents": [{"uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/a1z26.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 0, "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n", "meta": {"hash_id": "9d62fb0f40dd510a76f6df5288625cd0d642f0b62c8e76f1fe33ed8eb63a5367"}}, {"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 1, "content": " f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n 
super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"hash_id": "0971f506d218237fcc740b083fe519835888c910899e0f0ee4eb5e7a2d238832"}}]}], "golden_chunks": [{"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 1, "content": " f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"hash_id": "0971f506d218237fcc740b083fe519835888c910899e0f0ee4eb5e7a2d238832"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/a1z26.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What does the getParams method do in the A1z26 class?", "answer": "The getParams method returns None, indicating that this decoder class does not accept any parameters for configuration. This can be seen in the method implementation:\n\n@staticmethod\ndef getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n", "golden_doc_uuids": ["3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b"], "golden_chunk_uuids": [["3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", 1]], "golden_documents": [{"uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/a1z26.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", 
"index": 0, "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n", "meta": {"hash_id": "9d62fb0f40dd510a76f6df5288625cd0d642f0b62c8e76f1fe33ed8eb63a5367"}}, {"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 1, "content": " f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"hash_id": "0971f506d218237fcc740b083fe519835888c910899e0f0ee4eb5e7a2d238832"}}]}], "golden_chunks": [{"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 1, "content": " f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"hash_id": "0971f506d218237fcc740b083fe519835888c910899e0f0ee4eb5e7a2d238832"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/a1z26.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the getTarget method?", "answer": "The getTarget method returns the string \"a1z26\". 
This likely specifies the target encoding or cipher that this class is intended to decode.", "golden_doc_uuids": ["3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b"], "golden_chunk_uuids": [["3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", 1]], "golden_documents": [{"uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/a1z26.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 0, "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n", "meta": {"hash_id": "9d62fb0f40dd510a76f6df5288625cd0d642f0b62c8e76f1fe33ed8eb63a5367"}}, {"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 1, "content": " f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def 
getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"hash_id": "0971f506d218237fcc740b083fe519835888c910899e0f0ee4eb5e7a2d238832"}}]}], "golden_chunks": [{"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 1, "content": " f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"hash_id": "0971f506d218237fcc740b083fe519835888c910899e0f0ee4eb5e7a2d238832"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/a1z26.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How are the delimiters in the input ciphertext handled?", "answer": "The delimiters in the input ciphertext are identified using a regular expression that strips out every character that is not a delimiter: re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext) leaves only the delimiter characters (space, comma, semicolon, colon, hyphen, newline). The sorted unique set of these characters is stored in the delimiters variable and reported alongside the decoded plaintext in the log message:\nlogging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n)", "golden_doc_uuids": ["3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b"], "golden_chunk_uuids": [["3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", 0]], "golden_documents": [{"uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/a1z26.py", "repo_name": 
"Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 0, "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n", "meta": {"hash_id": "9d62fb0f40dd510a76f6df5288625cd0d642f0b62c8e76f1fe33ed8eb63a5367"}}, {"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 1, "content": " f\"Failed to decode A1Z26 due to invalid number '{val}'\"\n )\n return None\n val2 = int(i) + 96\n ctext_converted.append(chr(val2))\n ctext_decoded = \"\".join(ctext_converted)\n logging.info(\n f\"A1Z26 successful, returning '{ctext_decoded}' with delimiter(s) {delimiters}\"\n )\n return ctext_decoded\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"a1z26\"\n", "meta": {"hash_id": "0971f506d218237fcc740b083fe519835888c910899e0f0ee4eb5e7a2d238832"}}]}], "golden_chunks": [{"doc_uuid": "3990e75dcaf5a10f72ca64cdf4ac7c3cbeb3e0e9c643269d774222d0b105bb7b", "index": 0, "content": "import re\nfrom typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass A1z26(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs A1Z26 decoding\n \"\"\"\n logging.debug(\"Attempting A1Z26\")\n ctext_converted = []\n ctext_split = re.split(r\"[ ,;:\\-\\n]\", ctext)\n delimiters = set(sorted(re.sub(r\"[^ ,;:\\-\\n]\", \"\", ctext)))\n ctext_num = re.sub(r\"[,;:\\-\\s]\", \"\", ctext)\n ctext_decoded = \"\"\n if ctext_num.isnumeric() is False:\n logging.debug(\"Failed to decode A1Z26 due to non numeric character(s)\")\n return None\n try:\n for i in ctext_split:\n val = int(i)\n if val > 26 or val < 1:\n logging.debug(\n", "meta": {"hash_id": "9d62fb0f40dd510a76f6df5288625cd0d642f0b62c8e76f1fe33ed8eb63a5367"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/a1z26.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the priority method in the Base58_ripple class?", "answer": "The priority method returns a float value of 0.05, indicating the priority of this decoder. The comment suggests that while Base58 (Ripple) encoding is not expected to show up often, it is very fast to check. 
The priority helps determine the order in which decoders are tried when attempting to decode a ciphertext.", "golden_doc_uuids": ["b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad"], "golden_chunk_uuids": [["b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", 0]], "golden_documents": [{"uuid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", "content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/base58_ripple.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", "index": 0, "content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n", "meta": {"hash_id": "9840be05ac89f68a66a18a3a43732acfa827dfd85ce4fa4b5558436c13cf9502"}}]}], "golden_chunks": [{"doc_uuid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", "index": 0, "content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n", "meta": {"hash_id": "9840be05ac89f68a66a18a3a43732acfa827dfd85ce4fa4b5558436c13cf9502"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/base58_ripple.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What does the getParams method of the Base58_ripple class do?", "answer": "The 
getParams method returns None, indicating that this decoder does not require any additional parameters to be specified. It is a static method decorated with @staticmethod.", "golden_doc_uuids": ["b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad"], "golden_chunk_uuids": [["b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", 0]], "golden_documents": [{"uuid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", "content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/base58_ripple.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", "index": 0, "content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n", "meta": {"hash_id": "9840be05ac89f68a66a18a3a43732acfa827dfd85ce4fa4b5558436c13cf9502"}}]}], "golden_chunks": [{"doc_uuid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", "index": 0, "content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n", "meta": {"hash_id": "9840be05ac89f68a66a18a3a43732acfa827dfd85ce4fa4b5558436c13cf9502"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/base58_ripple.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What is the 
purpose of the getTarget method in the Base58_ripple class?", "answer": "The getTarget method is a static method that returns the string \"base58_ripple\". This likely serves as an identifier or target name for this specific decoder, allowing it to be easily referenced or searched for within the codebase.", "golden_doc_uuids": ["b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad"], "golden_chunk_uuids": [["b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", 0]], "golden_documents": [{"uuid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", "content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/base58_ripple.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", "index": 0, "content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n", "meta": {"hash_id": "9840be05ac89f68a66a18a3a43732acfa827dfd85ce4fa4b5558436c13cf9502"}}]}], "golden_chunks": [{"doc_uuid": "b9849c2091e8c45fe2589066b6c8ac5d95127a61895c7482b37250e853ab8aad", "index": 0, "content": "from typing import Dict, Optional\n\nimport base58\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, registry\n\n\n@registry.register\nclass Base58_ripple(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base58 (Ripple) decoding\n \"\"\"\n try:\n return base58.b58decode(ctext, alphabet=base58.RIPPLE_ALPHABET).decode(\n \"utf-8\"\n )\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n # Not expected to show up often, but also very fast to check.\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n\n @staticmethod\n def getTarget() -> str:\n return \"base58_ripple\"\n", "meta": {"hash_id": "9840be05ac89f68a66a18a3a43732acfa827dfd85ce4fa4b5558436c13cf9502"}}], "meta": {"doctype": "codebase", "relative_path": 
"/ciphey/basemods/Decoders/base58_ripple.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "How are the character and word boundaries determined in the Morse code decoding process?", "answer": "The character and word boundaries are determined based on a priority list defined in the BOUNDARIES dictionary: `BOUNDARIES = {\" \": 1, \"/\": 2, \"\\n\": 3}`. The decode method iterates over each character in the ciphertext and updates the char_boundary and word_boundary variables based on the encountered characters and their priorities. The character with the highest priority is considered as the character boundary, and if a character with a higher priority than the current word boundary is found, it becomes the new word boundary.", "golden_doc_uuids": ["d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f"], "golden_chunk_uuids": [["d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", 0]], "golden_documents": [{"uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Morse_code(Decoder[str]):\n # A priority list for char/word boundaries\n BOUNDARIES = {\" \": 1, \"/\": 2, \"\\n\": 3}\n PURGE = {ord(c): None for c in BOUNDARIES.keys()}\n MAX_PRIORITY = 3\n ALLOWED = {\".\", \"-\", \" \", \"/\", \"\\n\"}\n MORSE_CODE_DICT: Dict[str, str]\n MORSE_CODE_DICT_INV: Dict[str, str]\n\n def decode(self, ctext: T) -> Optional[U]:\n logging.debug(\"Attempting Morse code decoder\")\n\n char_boundary = word_boundary = None\n\n char_boundary = word_boundary = None\n char_priority = word_priority = 0\n # Custom loop allows early break\n for i in ctext:\n i_priority = self.BOUNDARIES.get(i)\n if i_priority is None:\n if i in self.ALLOWED:\n continue\n logging.debug(f\"Non-morse char '{i}' found\")\n return None\n\n if i_priority <= char_priority or i == char_boundary or i == word_boundary:\n continue\n # Default to having a char boundary over a word boundary\n if (\n i_priority > word_priority\n and word_boundary is None\n and char_boundary is not None\n ):\n word_priority = i_priority\n word_boundary = i\n continue\n char_priority = i_priority\n char_boundary = i\n\n logging.debug(\n f\"Char boundary is unicode {ord(char_boundary)}, and word boundary is unicode {ord(word_boundary) if word_boundary is not None else None}\"\n )\n\n result = \"\"\n\n for word in ctext.split(word_boundary) if word_boundary else [ctext]:\n logging.debug(f\"Attempting to decode word {word}\")\n for char in word.split(char_boundary):\n char = char.translate(self.PURGE)\n if len(char) == 0:\n continue\n try:\n m = self.MORSE_CODE_DICT_INV[char]\n except KeyError:\n logging.debug(f\"Invalid codeword '{char}' found\")\n return None\n result = result + m\n # after every word add a space\n result = result + \" \"\n if len(result) == 0:\n logging.debug(\"Morse code failed to match\")\n return None\n # Remove trailing space\n result = result[:-1]\n logging.info(f\"Morse code successful, returning {result}\")\n return result.strip().upper()\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.MORSE_CODE_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.MORSE_CODE_DICT_INV = {v: k for k, v in self.MORSE_CODE_DICT.items()}\n\n @staticmethod\n 
def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The morse code dictionary to use\",\n req=False,\n default=\"cipheydists::translate::morse\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"morse_code\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/morse_code.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 0, "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Morse_code(Decoder[str]):\n # A priority list for char/word boundaries\n BOUNDARIES = {\" \": 1, \"/\": 2, \"\\n\": 3}\n PURGE = {ord(c): None for c in BOUNDARIES.keys()}\n MAX_PRIORITY = 3\n ALLOWED = {\".\", \"-\", \" \", \"/\", \"\\n\"}\n MORSE_CODE_DICT: Dict[str, str]\n MORSE_CODE_DICT_INV: Dict[str, str]\n\n def decode(self, ctext: T) -> Optional[U]:\n logging.debug(\"Attempting Morse code decoder\")\n\n char_boundary = word_boundary = None\n\n", "meta": {"hash_id": "7af079af585a57c0a6f11acbaa70e5e1ee8acad2f1f2c54d911b5c4e5ca051ad"}}, {"doc_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 1, "content": " char_boundary = word_boundary = None\n char_priority = word_priority = 0\n # Custom loop allows early break\n for i in ctext:\n i_priority = self.BOUNDARIES.get(i)\n if i_priority is None:\n if i in self.ALLOWED:\n continue\n logging.debug(f\"Non-morse char '{i}' found\")\n return None\n\n", "meta": {"hash_id": "2ce25fccba717cc7dc3813251193b61abbd95314e2590d26b9fdfdfc2f15897f"}}, {"doc_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 2, "content": " if i_priority <= char_priority or i == char_boundary or i == word_boundary:\n continue\n # Default to having a char boundary over a word boundary\n if (\n i_priority > word_priority\n and word_boundary is None\n and char_boundary is not None\n ):\n word_priority = i_priority\n word_boundary = i\n continue\n char_priority = i_priority\n char_boundary = i\n\n logging.debug(\n f\"Char boundary is unicode {ord(char_boundary)}, and word boundary is unicode {ord(word_boundary) if word_boundary is not None else None}\"\n )\n\n result = \"\"\n\n", "meta": {"hash_id": "0c0b567f01a3473a9bff6d39404a17bc6df3098bc79726b8ff17de01cfad4c8f"}}, {"doc_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 3, "content": " for word in ctext.split(word_boundary) if word_boundary else [ctext]:\n logging.debug(f\"Attempting to decode word {word}\")\n for char in word.split(char_boundary):\n char = char.translate(self.PURGE)\n if len(char) == 0:\n continue\n try:\n m = self.MORSE_CODE_DICT_INV[char]\n except KeyError:\n logging.debug(f\"Invalid codeword '{char}' found\")\n return None\n result = result + m\n # after every word add a space\n result = result + \" \"\n if len(result) == 0:\n logging.debug(\"Morse code failed to match\")\n return None\n # Remove trailing space\n result = result[:-1]\n logging.info(f\"Morse code successful, returning {result}\")\n return result.strip().upper()\n\n", "meta": {"hash_id": "e399f57a57ad4eb7b1a0803fda407dd35a13cc9dae627573cc500c52c3eb8fd5"}}, {"doc_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 4, "content": " @staticmethod\n def priority() -> 
float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.MORSE_CODE_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.MORSE_CODE_DICT_INV = {v: k for k, v in self.MORSE_CODE_DICT.items()}\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The morse code dictionary to use\",\n req=False,\n default=\"cipheydists::translate::morse\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"morse_code\"\n", "meta": {"hash_id": "1b99ed4a6ed443d23de10da65f8a291bf8b60345b101f2a36ab46142fd6693da"}}]}], "golden_chunks": [{"doc_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 0, "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Morse_code(Decoder[str]):\n # A priority list for char/word boundaries\n BOUNDARIES = {\" \": 1, \"/\": 2, \"\\n\": 3}\n PURGE = {ord(c): None for c in BOUNDARIES.keys()}\n MAX_PRIORITY = 3\n ALLOWED = {\".\", \"-\", \" \", \"/\", \"\\n\"}\n MORSE_CODE_DICT: Dict[str, str]\n MORSE_CODE_DICT_INV: Dict[str, str]\n\n def decode(self, ctext: T) -> Optional[U]:\n logging.debug(\"Attempting Morse code decoder\")\n\n char_boundary = word_boundary = None\n\n", "meta": {"hash_id": "7af079af585a57c0a6f11acbaa70e5e1ee8acad2f1f2c54d911b5c4e5ca051ad"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/morse_code.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the priority method in the Morse_code class?", "answer": "The priority method returns a float value indicating the priority of the Morse_code decoder. It is defined as a static method and returns a value of 0.05. 
This priority value is used by the decoder registry to determine the order in which decoders are attempted.", "golden_doc_uuids": ["d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f"], "golden_chunk_uuids": [["d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", 4]], "golden_documents": [{"uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Morse_code(Decoder[str]):\n # A priority list for char/word boundaries\n BOUNDARIES = {\" \": 1, \"/\": 2, \"\\n\": 3}\n PURGE = {ord(c): None for c in BOUNDARIES.keys()}\n MAX_PRIORITY = 3\n ALLOWED = {\".\", \"-\", \" \", \"/\", \"\\n\"}\n MORSE_CODE_DICT: Dict[str, str]\n MORSE_CODE_DICT_INV: Dict[str, str]\n\n def decode(self, ctext: T) -> Optional[U]:\n logging.debug(\"Attempting Morse code decoder\")\n\n char_boundary = word_boundary = None\n\n char_boundary = word_boundary = None\n char_priority = word_priority = 0\n # Custom loop allows early break\n for i in ctext:\n i_priority = self.BOUNDARIES.get(i)\n if i_priority is None:\n if i in self.ALLOWED:\n continue\n logging.debug(f\"Non-morse char '{i}' found\")\n return None\n\n if i_priority <= char_priority or i == char_boundary or i == word_boundary:\n continue\n # Default to having a char boundary over a word boundary\n if (\n i_priority > word_priority\n and word_boundary is None\n and char_boundary is not None\n ):\n word_priority = i_priority\n word_boundary = i\n continue\n char_priority = i_priority\n char_boundary = i\n\n logging.debug(\n f\"Char boundary is unicode {ord(char_boundary)}, and word boundary is unicode {ord(word_boundary) if word_boundary is not None else None}\"\n )\n\n result = \"\"\n\n for word in ctext.split(word_boundary) if word_boundary else [ctext]:\n logging.debug(f\"Attempting to decode word {word}\")\n for char in word.split(char_boundary):\n char = char.translate(self.PURGE)\n if len(char) == 0:\n continue\n try:\n m = self.MORSE_CODE_DICT_INV[char]\n except KeyError:\n logging.debug(f\"Invalid codeword '{char}' found\")\n return None\n result = result + m\n # after every word add a space\n result = result + \" \"\n if len(result) == 0:\n logging.debug(\"Morse code failed to match\")\n return None\n # Remove trailing space\n result = result[:-1]\n logging.info(f\"Morse code successful, returning {result}\")\n return result.strip().upper()\n\n @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.MORSE_CODE_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.MORSE_CODE_DICT_INV = {v: k for k, v in self.MORSE_CODE_DICT.items()}\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The morse code dictionary to use\",\n req=False,\n default=\"cipheydists::translate::morse\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"morse_code\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/morse_code.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 0, "content": "from typing import Dict, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import 
Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Morse_code(Decoder[str]):\n # A priority list for char/word boundaries\n BOUNDARIES = {\" \": 1, \"/\": 2, \"\\n\": 3}\n PURGE = {ord(c): None for c in BOUNDARIES.keys()}\n MAX_PRIORITY = 3\n ALLOWED = {\".\", \"-\", \" \", \"/\", \"\\n\"}\n MORSE_CODE_DICT: Dict[str, str]\n MORSE_CODE_DICT_INV: Dict[str, str]\n\n def decode(self, ctext: T) -> Optional[U]:\n logging.debug(\"Attempting Morse code decoder\")\n\n char_boundary = word_boundary = None\n\n", "meta": {"hash_id": "7af079af585a57c0a6f11acbaa70e5e1ee8acad2f1f2c54d911b5c4e5ca051ad"}}, {"doc_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 1, "content": " char_boundary = word_boundary = None\n char_priority = word_priority = 0\n # Custom loop allows early break\n for i in ctext:\n i_priority = self.BOUNDARIES.get(i)\n if i_priority is None:\n if i in self.ALLOWED:\n continue\n logging.debug(f\"Non-morse char '{i}' found\")\n return None\n\n", "meta": {"hash_id": "2ce25fccba717cc7dc3813251193b61abbd95314e2590d26b9fdfdfc2f15897f"}}, {"doc_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 2, "content": " if i_priority <= char_priority or i == char_boundary or i == word_boundary:\n continue\n # Default to having a char boundary over a word boundary\n if (\n i_priority > word_priority\n and word_boundary is None\n and char_boundary is not None\n ):\n word_priority = i_priority\n word_boundary = i\n continue\n char_priority = i_priority\n char_boundary = i\n\n logging.debug(\n f\"Char boundary is unicode {ord(char_boundary)}, and word boundary is unicode {ord(word_boundary) if word_boundary is not None else None}\"\n )\n\n result = \"\"\n\n", "meta": {"hash_id": "0c0b567f01a3473a9bff6d39404a17bc6df3098bc79726b8ff17de01cfad4c8f"}}, {"doc_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 3, "content": " for word in ctext.split(word_boundary) if word_boundary else [ctext]:\n logging.debug(f\"Attempting to decode word {word}\")\n for char in word.split(char_boundary):\n char = char.translate(self.PURGE)\n if len(char) == 0:\n continue\n try:\n m = self.MORSE_CODE_DICT_INV[char]\n except KeyError:\n logging.debug(f\"Invalid codeword '{char}' found\")\n return None\n result = result + m\n # after every word add a space\n result = result + \" \"\n if len(result) == 0:\n logging.debug(\"Morse code failed to match\")\n return None\n # Remove trailing space\n result = result[:-1]\n logging.info(f\"Morse code successful, returning {result}\")\n return result.strip().upper()\n\n", "meta": {"hash_id": "e399f57a57ad4eb7b1a0803fda407dd35a13cc9dae627573cc500c52c3eb8fd5"}}, {"doc_uuid": "d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 4, "content": " @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.MORSE_CODE_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.MORSE_CODE_DICT_INV = {v: k for k, v in self.MORSE_CODE_DICT.items()}\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The morse code dictionary to use\",\n req=False,\n default=\"cipheydists::translate::morse\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"morse_code\"\n", "meta": {"hash_id": "1b99ed4a6ed443d23de10da65f8a291bf8b60345b101f2a36ab46142fd6693da"}}]}], "golden_chunks": [{"doc_uuid": 
"d4bc89992e119e8c40738b830e03e9586e1cb958d4e30c96f935e7385841364f", "index": 4, "content": " @staticmethod\n def priority() -> float:\n return 0.05\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.MORSE_CODE_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.MORSE_CODE_DICT_INV = {v: k for k, v in self.MORSE_CODE_DICT.items()}\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The morse code dictionary to use\",\n req=False,\n default=\"cipheydists::translate::morse\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"morse_code\"\n", "meta": {"hash_id": "1b99ed4a6ed443d23de10da65f8a291bf8b60345b101f2a36ab46142fd6693da"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/morse_code.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "What does the `getInfo` method of the `Soundex` class do?", "answer": "The `getInfo` method of the `Soundex` class returns a `CrackInfo` object containing information about the likelihood and runtime of successfully cracking a Soundex-encoded ciphertext. Specifically, it returns a `CrackInfo` with `success_likelihood=0.1`, `success_runtime=1e-5`, and `failure_runtime=1e-5`.", "golden_doc_uuids": ["fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31"], "golden_chunk_uuids": [["fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", 0]], "golden_documents": [{"uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in 
sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Crackers/soundex.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 6, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 0, "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n", "meta": {"hash_id": "d4e53c9f890933bcdad035e0f04d8edee8c3bbf5428a9e78d46eaf9e3077f4e4"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 1, "content": " def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n", "meta": {"hash_id": 
"f959fd100ae004424915db9f72cd1185233ecf6e5b0098056cb676123de45335"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 2, "content": " # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n", "meta": {"hash_id": "7d75952b52a8227663a53e4eb18c687e3b394c31ad4e40dbde696d8ed8bb1949"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 3, "content": " sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n", "meta": {"hash_id": "5e94ad3c3be95a611661e8fd6ed46d2d314a3718f7a1bbbcc8a6613bc461ed1d"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 4, "content": " def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n", "meta": {"hash_id": "82d9cdd15be688d06533d1616d7a0d0168c6bf5a6391656f8dad84b399d78586"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 5, "content": " for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = 
{}\n self.sentence_freq = 0\n", "meta": {"hash_id": "2573a4c0001ca85f1558dfc55b6b5cc10708aa2a39cbe0c467e221c4a3b0d6ac"}}]}], "golden_chunks": [{"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 0, "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n", "meta": {"hash_id": "d4e53c9f890933bcdad035e0f04d8edee8c3bbf5428a9e78d46eaf9e3077f4e4"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Crackers/soundex.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 6, "source": "codebases/jsonl"}} +{"query": "What does the `getTarget` method of the `Soundex` class return?", "answer": "The `getTarget` method of the `Soundex` class returns the string `\"soundex\"`. This indicates that the class is designed to crack Soundex-encoded ciphertexts.", "golden_doc_uuids": ["fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31"], "golden_chunk_uuids": [["fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", 0]], "golden_documents": [{"uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n 
result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Crackers/soundex.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 6, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 0, "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n", "meta": {"hash_id": "d4e53c9f890933bcdad035e0f04d8edee8c3bbf5428a9e78d46eaf9e3077f4e4"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 1, "content": " def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n", "meta": {"hash_id": 
"f959fd100ae004424915db9f72cd1185233ecf6e5b0098056cb676123de45335"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 2, "content": " # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n", "meta": {"hash_id": "7d75952b52a8227663a53e4eb18c687e3b394c31ad4e40dbde696d8ed8bb1949"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 3, "content": " sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n", "meta": {"hash_id": "5e94ad3c3be95a611661e8fd6ed46d2d314a3718f7a1bbbcc8a6613bc461ed1d"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 4, "content": " def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n", "meta": {"hash_id": "82d9cdd15be688d06533d1616d7a0d0168c6bf5a6391656f8dad84b399d78586"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 5, "content": " for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = 
{}\n self.sentence_freq = 0\n", "meta": {"hash_id": "2573a4c0001ca85f1558dfc55b6b5cc10708aa2a39cbe0c467e221c4a3b0d6ac"}}]}], "golden_chunks": [{"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 0, "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n", "meta": {"hash_id": "d4e53c9f890933bcdad035e0f04d8edee8c3bbf5428a9e78d46eaf9e3077f4e4"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Crackers/soundex.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 6, "source": "codebases/jsonl"}} +{"query": "How does the `attemptCrack` method of the `Soundex` class attempt to crack a Soundex-encoded ciphertext?", "answer": "The `attemptCrack` method attempts to crack a Soundex-encoded ciphertext by generating all possible word combinations that match the Soundex codes. It does this by:\n1. Converting the ciphertext to uppercase and removing delimiters and whitespace\n2. Verifying that the ciphertext contains only A-Z and 0-9 characters\n3. Verifying that the length of the ciphertext is divisible by 4\n4. Splitting the ciphertext into groups of 4 characters\n5. Finding all words in a Soundex dictionary that correspond to each group of 4 characters\n6. Generating all possible sentences from the matched words\n7. Sorting the sentences based on word frequency\n8. 
Returning the sorted sentences as `CrackResult` objects", "golden_doc_uuids": ["fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31"], "golden_chunk_uuids": [["fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", 2]], "golden_documents": [{"uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, 
frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Crackers/soundex.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 6, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 0, "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n", "meta": {"hash_id": "d4e53c9f890933bcdad035e0f04d8edee8c3bbf5428a9e78d46eaf9e3077f4e4"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 1, "content": " def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n", "meta": {"hash_id": "f959fd100ae004424915db9f72cd1185233ecf6e5b0098056cb676123de45335"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 2, "content": " # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n", "meta": {"hash_id": "7d75952b52a8227663a53e4eb18c687e3b394c31ad4e40dbde696d8ed8bb1949"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 3, "content": " sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n 
result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n", "meta": {"hash_id": "5e94ad3c3be95a611661e8fd6ed46d2d314a3718f7a1bbbcc8a6613bc461ed1d"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 4, "content": " def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n", "meta": {"hash_id": "82d9cdd15be688d06533d1616d7a0d0168c6bf5a6391656f8dad84b399d78586"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 5, "content": " for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n", "meta": {"hash_id": "2573a4c0001ca85f1558dfc55b6b5cc10708aa2a39cbe0c467e221c4a3b0d6ac"}}]}], "golden_chunks": [{"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 2, "content": " # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n", "meta": {"hash_id": "7d75952b52a8227663a53e4eb18c687e3b394c31ad4e40dbde696d8ed8bb1949"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Crackers/soundex.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 6, "source": 
"codebases/jsonl"}} +{"query": "What does the `sortlistwithdict` method of the `Soundex` class do?", "answer": "The `sortlistwithdict` method sorts a list of sentences based on the sum of the frequency ranks of each word in the sentence. It takes two arguments: `listtosort` (the list of sentences to sort) and `hashes` (a dictionary mapping each sentence to its frequency rank sum). It returns the sorted list of sentences.", "golden_doc_uuids": ["fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31"], "golden_chunk_uuids": [["fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", 3]], "golden_documents": [{"uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in 
word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Crackers/soundex.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 6, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 0, "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n", "meta": {"hash_id": "d4e53c9f890933bcdad035e0f04d8edee8c3bbf5428a9e78d46eaf9e3077f4e4"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 1, "content": " def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n", "meta": {"hash_id": "f959fd100ae004424915db9f72cd1185233ecf6e5b0098056cb676123de45335"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 2, "content": " # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n 
self.sentence_freq,\n self.word_freq,\n )\n\n", "meta": {"hash_id": "7d75952b52a8227663a53e4eb18c687e3b394c31ad4e40dbde696d8ed8bb1949"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 3, "content": " sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n", "meta": {"hash_id": "5e94ad3c3be95a611661e8fd6ed46d2d314a3718f7a1bbbcc8a6613bc461ed1d"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 4, "content": " def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n", "meta": {"hash_id": "82d9cdd15be688d06533d1616d7a0d0168c6bf5a6391656f8dad84b399d78586"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 5, "content": " for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n", "meta": {"hash_id": "2573a4c0001ca85f1558dfc55b6b5cc10708aa2a39cbe0c467e221c4a3b0d6ac"}}]}], "golden_chunks": [{"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 3, "content": " sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n", "meta": {"hash_id": "5e94ad3c3be95a611661e8fd6ed46d2d314a3718f7a1bbbcc8a6613bc461ed1d"}}], "meta": {"doctype": "codebase", "relative_path": 
"/ciphey/basemods/Crackers/soundex.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 6, "source": "codebases/jsonl"}} +{"query": "What parameters does the `Soundex` class take in its constructor?", "answer": "The `Soundex` class takes a `Config` object as a parameter in its constructor. It uses this `Config` object to initialize two attributes:\n- `self.SOUNDEX_DICT`: a Soundex dictionary, obtained by calling `config.get_resource(self._params()[\"dict\"], Translation)`\n- `self.word_freq`: a list of words ordered by frequency, obtained by calling `config.get_resource(self._params()[\"freq\"], Translation)`\n\nIt also initializes two other attributes:\n- `self.frequency_dict`: an empty dictionary to store the frequency rank sums of generated sentences\n- `self.sentence_freq`: a variable initialized to 0, used to keep track of the current sentence's frequency rank sum", "golden_doc_uuids": ["fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31"], "golden_chunk_uuids": [["fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", 5]], "golden_documents": [{"uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n def getSentenceCombo(\n self, A, 
sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Crackers/soundex.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 6, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 0, "content": "import re\nfrom typing import Dict, List, Optional\n\nimport logging\nfrom rich.logging import RichHandler\n\nfrom ciphey.iface import (\n Config,\n Cracker,\n CrackInfo,\n CrackResult,\n ParamSpec,\n Translation,\n registry,\n)\n\n\n@registry.register\nclass Soundex(Cracker[str]):\n def getInfo(self, ctext: str) -> CrackInfo:\n return CrackInfo(\n success_likelihood=0.1,\n success_runtime=1e-5,\n failure_runtime=1e-5,\n )\n\n @staticmethod\n def getTarget() -> str:\n return \"soundex\"\n\n", "meta": {"hash_id": "d4e53c9f890933bcdad035e0f04d8edee8c3bbf5428a9e78d46eaf9e3077f4e4"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 1, "content": " def attemptCrack(self, ctext: str) -> List[CrackResult]:\n \"\"\"\n Attempts to crack Soundex by generating all possible combinations.\n \"\"\"\n logging.debug(\"Attempting Soundex cracker\")\n word_list = []\n sentences = []\n result = []\n\n # Convert to uppercase and replace delimiters and whitespace with nothing\n ctext = re.sub(r\"[,;:\\-\\s]\", \"\", ctext.upper())\n\n # Make sure ctext contains only A-Z and 0-9\n if bool(re.search(r\"[^A-Z0-9]\", ctext)) is True:\n logging.debug(\"Failed to crack soundex due to non soundex character(s)\")\n return None\n\n", "meta": {"hash_id": "f959fd100ae004424915db9f72cd1185233ecf6e5b0098056cb676123de45335"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 2, "content": " # Make sure ctext is divisible by 4\n ctext_len = len(ctext)\n if ctext_len % 4:\n logging.debug(\n f\"Failed to decode Soundex because length must be a multiple of 4, not '{ctext_len}'\"\n )\n return None\n\n # Split ctext into groups of 4\n ctext = \" \".join(ctext[i : i + 4] for i in range(0, 
len(ctext), 4))\n ctext_split = ctext.split(\" \")\n soundex_keys = self.SOUNDEX_DICT.keys()\n\n # Find all words that correspond to each given soundex code\n for code in ctext_split:\n if code in soundex_keys:\n word_list.append(self.SOUNDEX_DICT[code])\n\n logging.info(f\"Possible words for given encoded text: {word_list}\")\n\n # Find all possible sentences\n self.getSentenceCombo(\n word_list,\n sentences,\n self.frequency_dict,\n self.sentence_freq,\n self.word_freq,\n )\n\n", "meta": {"hash_id": "7d75952b52a8227663a53e4eb18c687e3b394c31ad4e40dbde696d8ed8bb1949"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 3, "content": " sorted_sentences = self.sortlistwithdict(sentences, self.frequency_dict)\n\n for sentence in sorted_sentences:\n result.append(CrackResult(value=sentence))\n\n logging.debug(f\"Soundex cracker - Returning results: {result}\")\n return result\n\n def sortlistwithdict(self, listtosort, hashes):\n \"\"\"\n This function uses the sum of ranks (based on frequency) of each word in each\n sentence and sorts them according to it.\n \"\"\"\n return sorted(listtosort, key=lambda x: hashes[x])\n\n", "meta": {"hash_id": "5e94ad3c3be95a611661e8fd6ed46d2d314a3718f7a1bbbcc8a6613bc461ed1d"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 4, "content": " def getSentenceCombo(\n self, A, sentences, frequency_dict, sentence_freq, word_freq, result=\"\", n=0\n ):\n \"\"\"\n This function uses recursion to generate a list of sentences from all possible\n words for a given set of soundex codes.\n \"\"\"\n logging.debug(\"Creating all possible sentences from Soundex\")\n if n == len(A):\n sentences.append(result[1:])\n for word in result[1:].split():\n # Adding the rank of each word to find out the sentence's net frequency\n if word in word_freq:\n sentence_freq += word_freq.index(word)\n # If the word isn't in the frequency list then it's a very uncommon word\n # so we add a large number (5000)\n else:\n sentence_freq += 5000\n frequency_dict[result[1:]] = sentence_freq\n sentence_freq = 0\n return\n\n", "meta": {"hash_id": "82d9cdd15be688d06533d1616d7a0d0168c6bf5a6391656f8dad84b399d78586"}}, {"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 5, "content": " for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n", "meta": {"hash_id": "2573a4c0001ca85f1558dfc55b6b5cc10708aa2a39cbe0c467e221c4a3b0d6ac"}}]}], "golden_chunks": [{"doc_uuid": "fd3a6d5d6a5a1ab1afaae8810c2d2141ea1707b7eb7bfd5b883947d078519c31", "index": 5, "content": " for word in A[n]:\n out = result + \" \" + word\n self.getSentenceCombo(\n A, sentences, frequency_dict, sentence_freq, word_freq, out, n + 1\n )\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n 
return {\n \"dict\": ParamSpec(\n desc=\"The Soundex dictionary to use\",\n req=False,\n default=\"cipheydists::translate::soundex\",\n ),\n \"freq\": ParamSpec(\n desc=\"The word frequency dictionary to use\",\n req=False,\n default=\"cipheydists::list::English5000Freq\",\n ),\n }\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.SOUNDEX_DICT = config.get_resource(self._params()[\"dict\"], Translation)\n self.word_freq = config.get_resource(self._params()[\"freq\"], Translation)\n self.frequency_dict = {}\n self.sentence_freq = 0\n", "meta": {"hash_id": "2573a4c0001ca85f1558dfc55b6b5cc10708aa2a39cbe0c467e221c4a3b0d6ac"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Crackers/soundex.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 6, "source": "codebases/jsonl"}} +{"query": "What parameters can be configured for the Tap_code decoder?", "answer": "The Tap_code decoder has one configurable parameter, which is specified in the getParams() static method. The parameter is named \"dict\" and has the following specification:\n```python\n\"dict\": ParamSpec(\n desc=\"The table of letters used for the tap code interpretation.\",\n req=False,\n default=\"cipheydists::translate::tap_code\",\n)\n```\nIt represents the lookup table used for Tap code decoding. It is an optional parameter with a default value of \"cipheydists::translate::tap_code\".", "golden_doc_uuids": ["44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3"], "golden_chunk_uuids": [["44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3", 1]], "golden_documents": [{"uuid": "44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3", "content": "# by https://github.com/RustyDucky and https://github.com/lukasgabriel\n\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Tap_code(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Tap code decoding\n \"\"\"\n try:\n result = \"\"\n combinations = ctext.split(\" \")\n for fragment in combinations:\n result += self.TABLE.get(fragment)\n return result\n except Exception:\n return None\n\n @staticmethod\n def priority() -> float:\n return 0.06\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.TABLE = config.get_resource(self._params()[\"dict\"], Translation)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The table of letters used for the tap code interpretation.\",\n req=False,\n default=\"cipheydists::translate::tap_code\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"tap_code\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/tap_code.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3", "index": 0, "content": "# by https://github.com/RustyDucky and https://github.com/lukasgabriel\n\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, Translation, U, registry\n\n\n@registry.register\nclass Tap_code(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Tap code decoding\n \"\"\"\n try:\n result = \"\"\n combinations = ctext.split(\" \")\n for fragment in combinations:\n result += self.TABLE.get(fragment)\n return result\n except Exception:\n return None\n\n", "meta": 
{"hash_id": "e85d41d336d9e14861140f8b9e6a951f7b514b100b80daf647c58f6e3a47a03b"}}, {"doc_uuid": "44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3", "index": 1, "content": " @staticmethod\n def priority() -> float:\n return 0.06\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.TABLE = config.get_resource(self._params()[\"dict\"], Translation)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The table of letters used for the tap code interpretation.\",\n req=False,\n default=\"cipheydists::translate::tap_code\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"tap_code\"\n", "meta": {"hash_id": "bf0b764318559516ca69e9315345d265fc9e282066eaa2fe2b55e837318969a7"}}]}], "golden_chunks": [{"doc_uuid": "44f12a4ef079daf871dc6a95ed7af4ff2ec55b48ca3b004dfc954bf4c9b05ba3", "index": 1, "content": " @staticmethod\n def priority() -> float:\n return 0.06\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.TABLE = config.get_resource(self._params()[\"dict\"], Translation)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The table of letters used for the tap code interpretation.\",\n req=False,\n default=\"cipheydists::translate::tap_code\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"tap_code\"\n", "meta": {"hash_id": "bf0b764318559516ca69e9315345d265fc9e282066eaa2fe2b55e837318969a7"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/tap_code.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How does the CipheyDists class handle configuration?", "answer": "The CipheyDists class takes a `Config` object as a parameter in its constructor. It passes this `config` object to the superclass constructor using `super().__init__(config)`. 
This suggests that the CipheyDists class inherits from a base class that handles configuration, and it forwards the configuration to the superclass.", "golden_doc_uuids": ["bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2"], "golden_chunk_uuids": [["bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2", 1]], "golden_documents": [{"uuid": "bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2", "content": "from functools import lru_cache\nfrom typing import Any, Dict, Optional, Set\n\nimport cipheydists\nimport logging\n\nfrom ciphey.iface import (\n Config,\n Distribution,\n ParamSpec,\n ResourceLoader,\n Translation,\n WordList,\n registry,\n)\n\n\n@registry.register_multi(WordList, Distribution, Translation)\nclass CipheyDists(ResourceLoader):\n # _wordlists: Set[str] = frozenset({\"english\", \"english1000\", \"englishStopWords\"})\n # _brandons: Set[str] = frozenset({\"english\"})\n # _dists: Set[str] = frozenset({\"twist\"})\n # _translates: Set[str] = frozenset({\"morse\"})\n _getters = {\n \"list\": cipheydists.get_list,\n \"dist\": cipheydists.get_dist,\n \"brandon\": cipheydists.get_brandon,\n \"translate\": cipheydists.get_translate,\n }\n\n def whatResources(self) -> Optional[Set[str]]:\n pass\n\n @lru_cache()\n def getResource(self, name: str) -> Any:\n logging.debug(f\"Loading cipheydists resource {name}\")\n prefix, name = name.split(\"::\", 1)\n return self._getters[prefix](name)\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Resources/cipheydists.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2", "index": 0, "content": "from functools import lru_cache\nfrom typing import Any, Dict, Optional, Set\n\nimport cipheydists\nimport logging\n\nfrom ciphey.iface import (\n Config,\n Distribution,\n ParamSpec,\n ResourceLoader,\n Translation,\n WordList,\n registry,\n)\n\n\n@registry.register_multi(WordList, Distribution, Translation)\nclass CipheyDists(ResourceLoader):\n # _wordlists: Set[str] = frozenset({\"english\", \"english1000\", \"englishStopWords\"})\n # _brandons: Set[str] = frozenset({\"english\"})\n # _dists: Set[str] = frozenset({\"twist\"})\n # _translates: Set[str] = frozenset({\"morse\"})\n _getters = {\n \"list\": cipheydists.get_list,\n \"dist\": cipheydists.get_dist,\n \"brandon\": cipheydists.get_brandon,\n \"translate\": cipheydists.get_translate,\n }\n\n", "meta": {"hash_id": "8a5b65f9667da3bd230313ff441e76a72c1972dc756b30f50f50f96d6ac7ca08"}}, {"doc_uuid": "bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2", "index": 1, "content": " def whatResources(self) -> Optional[Set[str]]:\n pass\n\n @lru_cache()\n def getResource(self, name: str) -> Any:\n logging.debug(f\"Loading cipheydists resource {name}\")\n prefix, name = name.split(\"::\", 1)\n return self._getters[prefix](name)\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n", "meta": {"hash_id": "f291dfb4f23bd0072d636731d357e3a43704127a8518b45958d12c144eb10022"}}]}], "golden_chunks": [{"doc_uuid": "bfc6250497ea53318a31782941f86e13660430636fa5ac61fbda86e2ffb94ea2", "index": 1, "content": " def whatResources(self) -> Optional[Set[str]]:\n pass\n\n @lru_cache()\n 
def getResource(self, name: str) -> Any:\n logging.debug(f\"Loading cipheydists resource {name}\")\n prefix, name = name.split(\"::\", 1)\n return self._getters[prefix](name)\n\n def __init__(self, config: Config):\n super().__init__(config)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return None\n", "meta": {"hash_id": "f291dfb4f23bd0072d636731d357e3a43704127a8518b45958d12c144eb10022"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Resources/cipheydists.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the priority method used for in the Base69 class?", "answer": "The `priority` method is used to determine the priority of the Base69 decoder. It returns a float value of 0.2, which indicates the priority of this decoder relative to other decoders. The comment suggests that if this value becomes lower than or equal to the reverse (encoding) priority, it may break the decoding process.", "golden_doc_uuids": ["2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3"], "golden_chunk_uuids": [["2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", 3]], "golden_documents": [{"uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "content": "# Translated to Python and adapted for Ciphey from the JS original at https://github.com/pshihn/base69\n\n\nimport re\nfrom math import ceil\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, WordList, registry\n\n\n@registry.register\nclass Base69(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base69 decoding\n \"\"\"\n # Remove whitespace\n try:\n ctext = re.sub(r\"\\s+\", \"\", ctext, flags=re.UNICODE)\n extra_bytes = 0\n clen = len(ctext)\n\n if ctext[:-1] == \"=\":\n extra_bytes = int(ctext[clen - 2])\n\n CHUNK_COUNT = ceil(clen / 16)\n result = [0 for _ in range(CHUNK_COUNT * 7 - extra_bytes)]\n\n for i in range(CHUNK_COUNT):\n chunk_string = ctext[i * 16 : (i + 1) * 16]\n if extra_bytes and (i == CHUNK_COUNT - 1):\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert[0 : 7 - extra_bytes]):\n result[n + i * 7] = elem\n else:\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert):\n result[n + i * 7] = elem % 256\n return bytearray(result).decode().strip(\"\\x00\")\n except Exception:\n return None\n\n def decode_chunk(self, s: str):\n padded_bytes = s.endswith(\"=\")\n\n decoded = [0 for _ in range(8)]\n for i in range(8):\n decoded[i] = (\n 0\n if i == 7 and padded_bytes\n else self.chars_to_byte(s[i * 2 : i * 2 + 2])\n )\n\n result = [0 for _ in range(7)]\n for i in range(7):\n t1 = decoded[i] << (i + 1)\n t2 = decoded[i + 1] >> (7 - i - 1)\n result[i] = t1 | t2\n return result\n\n def chars_to_byte(self, s: str):\n return (69 * self.CHARS.index(s[1])) + (self.CHARS.index(s[0]))\n\n @staticmethod\n def priority() -> float:\n # If this becomes lower or equal to the reverse, it breaks.\n # So I'll set it to 0.2 for now since it is very fast anyways.\n return 0.2\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.CHARS = config.get_resource(self._params()[\"dict\"], WordList)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The charset used for the decoder.\",\n req=False,\n default=\"cipheydists::list::base69\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"base69\"\n", "meta": 
{"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/base69.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "index": 0, "content": "# Translated to Python and adapted for Ciphey from the JS original at https://github.com/pshihn/base69\n\n\nimport re\nfrom math import ceil\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, WordList, registry\n\n\n@registry.register\nclass Base69(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base69 decoding\n \"\"\"\n # Remove whitespace\n try:\n ctext = re.sub(r\"\\s+\", \"\", ctext, flags=re.UNICODE)\n extra_bytes = 0\n clen = len(ctext)\n\n if ctext[:-1] == \"=\":\n extra_bytes = int(ctext[clen - 2])\n\n CHUNK_COUNT = ceil(clen / 16)\n result = [0 for _ in range(CHUNK_COUNT * 7 - extra_bytes)]\n\n", "meta": {"hash_id": "2461f7e9d7d08c990883ed16d21a1ecb0ed9ef5661f66be1305717197075097f"}}, {"doc_uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "index": 1, "content": " for i in range(CHUNK_COUNT):\n chunk_string = ctext[i * 16 : (i + 1) * 16]\n if extra_bytes and (i == CHUNK_COUNT - 1):\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert[0 : 7 - extra_bytes]):\n result[n + i * 7] = elem\n else:\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert):\n result[n + i * 7] = elem % 256\n return bytearray(result).decode().strip(\"\\x00\")\n except Exception:\n return None\n\n", "meta": {"hash_id": "d7de29cd37c2138837aa5409f18b547f57a472745caa48668fa29469ab507328"}}, {"doc_uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "index": 2, "content": " def decode_chunk(self, s: str):\n padded_bytes = s.endswith(\"=\")\n\n decoded = [0 for _ in range(8)]\n for i in range(8):\n decoded[i] = (\n 0\n if i == 7 and padded_bytes\n else self.chars_to_byte(s[i * 2 : i * 2 + 2])\n )\n\n result = [0 for _ in range(7)]\n for i in range(7):\n t1 = decoded[i] << (i + 1)\n t2 = decoded[i + 1] >> (7 - i - 1)\n result[i] = t1 | t2\n return result\n\n", "meta": {"hash_id": "79c5130158f80e721e37a8b4689defffaec023bd5b436c7111532ba5cba384e5"}}, {"doc_uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "index": 3, "content": " def chars_to_byte(self, s: str):\n return (69 * self.CHARS.index(s[1])) + (self.CHARS.index(s[0]))\n\n @staticmethod\n def priority() -> float:\n # If this becomes lower or equal to the reverse, it breaks.\n # So I'll set it to 0.2 for now since it is very fast anyways.\n return 0.2\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.CHARS = config.get_resource(self._params()[\"dict\"], WordList)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The charset used for the decoder.\",\n req=False,\n default=\"cipheydists::list::base69\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"base69\"\n", "meta": {"hash_id": "31c6161231aff199013f2ac4f095123513a8864937653fcef44a1e65b46a7526"}}]}], "golden_chunks": [{"doc_uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "index": 3, "content": " def chars_to_byte(self, s: str):\n return (69 * self.CHARS.index(s[1])) + (self.CHARS.index(s[0]))\n\n @staticmethod\n def priority() -> float:\n # If this becomes lower or equal to the reverse, it breaks.\n # So I'll set it to 
0.2 for now since it is very fast anyways.\n return 0.2\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.CHARS = config.get_resource(self._params()[\"dict\"], WordList)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The charset used for the decoder.\",\n req=False,\n default=\"cipheydists::list::base69\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"base69\"\n", "meta": {"hash_id": "31c6161231aff199013f2ac4f095123513a8864937653fcef44a1e65b46a7526"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/base69.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "How are the parameters for the Base69 class specified?", "answer": "The parameters for the Base69 class are specified using the `getParams` method. It returns a dictionary with a single parameter named \"dict\". The \"dict\" parameter is of type `ParamSpec` and specifies the character set (WordList) used for decoding. It has a default value of \"cipheydists::list::base69\", which is likely a reference to a predefined character set.", "golden_doc_uuids": ["2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3"], "golden_chunk_uuids": [["2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", 3]], "golden_documents": [{"uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "content": "# Translated to Python and adapted for Ciphey from the JS original at https://github.com/pshihn/base69\n\n\nimport re\nfrom math import ceil\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, WordList, registry\n\n\n@registry.register\nclass Base69(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base69 decoding\n \"\"\"\n # Remove whitespace\n try:\n ctext = re.sub(r\"\\s+\", \"\", ctext, flags=re.UNICODE)\n extra_bytes = 0\n clen = len(ctext)\n\n if ctext[:-1] == \"=\":\n extra_bytes = int(ctext[clen - 2])\n\n CHUNK_COUNT = ceil(clen / 16)\n result = [0 for _ in range(CHUNK_COUNT * 7 - extra_bytes)]\n\n for i in range(CHUNK_COUNT):\n chunk_string = ctext[i * 16 : (i + 1) * 16]\n if extra_bytes and (i == CHUNK_COUNT - 1):\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert[0 : 7 - extra_bytes]):\n result[n + i * 7] = elem\n else:\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert):\n result[n + i * 7] = elem % 256\n return bytearray(result).decode().strip(\"\\x00\")\n except Exception:\n return None\n\n def decode_chunk(self, s: str):\n padded_bytes = s.endswith(\"=\")\n\n decoded = [0 for _ in range(8)]\n for i in range(8):\n decoded[i] = (\n 0\n if i == 7 and padded_bytes\n else self.chars_to_byte(s[i * 2 : i * 2 + 2])\n )\n\n result = [0 for _ in range(7)]\n for i in range(7):\n t1 = decoded[i] << (i + 1)\n t2 = decoded[i + 1] >> (7 - i - 1)\n result[i] = t1 | t2\n return result\n\n def chars_to_byte(self, s: str):\n return (69 * self.CHARS.index(s[1])) + (self.CHARS.index(s[0]))\n\n @staticmethod\n def priority() -> float:\n # If this becomes lower or equal to the reverse, it breaks.\n # So I'll set it to 0.2 for now since it is very fast anyways.\n return 0.2\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.CHARS = config.get_resource(self._params()[\"dict\"], WordList)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n 
desc=\"The charset used for the decoder.\",\n req=False,\n default=\"cipheydists::list::base69\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"base69\"\n", "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/base69.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "index": 0, "content": "# Translated to Python and adapted for Ciphey from the JS original at https://github.com/pshihn/base69\n\n\nimport re\nfrom math import ceil\nfrom typing import Dict, Optional\n\nfrom ciphey.iface import Config, Decoder, ParamSpec, T, U, WordList, registry\n\n\n@registry.register\nclass Base69(Decoder[str]):\n def decode(self, ctext: T) -> Optional[U]:\n \"\"\"\n Performs Base69 decoding\n \"\"\"\n # Remove whitespace\n try:\n ctext = re.sub(r\"\\s+\", \"\", ctext, flags=re.UNICODE)\n extra_bytes = 0\n clen = len(ctext)\n\n if ctext[:-1] == \"=\":\n extra_bytes = int(ctext[clen - 2])\n\n CHUNK_COUNT = ceil(clen / 16)\n result = [0 for _ in range(CHUNK_COUNT * 7 - extra_bytes)]\n\n", "meta": {"hash_id": "2461f7e9d7d08c990883ed16d21a1ecb0ed9ef5661f66be1305717197075097f"}}, {"doc_uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "index": 1, "content": " for i in range(CHUNK_COUNT):\n chunk_string = ctext[i * 16 : (i + 1) * 16]\n if extra_bytes and (i == CHUNK_COUNT - 1):\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert[0 : 7 - extra_bytes]):\n result[n + i * 7] = elem\n else:\n insert = self.decode_chunk(chunk_string)\n for n, elem in enumerate(insert):\n result[n + i * 7] = elem % 256\n return bytearray(result).decode().strip(\"\\x00\")\n except Exception:\n return None\n\n", "meta": {"hash_id": "d7de29cd37c2138837aa5409f18b547f57a472745caa48668fa29469ab507328"}}, {"doc_uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "index": 2, "content": " def decode_chunk(self, s: str):\n padded_bytes = s.endswith(\"=\")\n\n decoded = [0 for _ in range(8)]\n for i in range(8):\n decoded[i] = (\n 0\n if i == 7 and padded_bytes\n else self.chars_to_byte(s[i * 2 : i * 2 + 2])\n )\n\n result = [0 for _ in range(7)]\n for i in range(7):\n t1 = decoded[i] << (i + 1)\n t2 = decoded[i + 1] >> (7 - i - 1)\n result[i] = t1 | t2\n return result\n\n", "meta": {"hash_id": "79c5130158f80e721e37a8b4689defffaec023bd5b436c7111532ba5cba384e5"}}, {"doc_uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "index": 3, "content": " def chars_to_byte(self, s: str):\n return (69 * self.CHARS.index(s[1])) + (self.CHARS.index(s[0]))\n\n @staticmethod\n def priority() -> float:\n # If this becomes lower or equal to the reverse, it breaks.\n # So I'll set it to 0.2 for now since it is very fast anyways.\n return 0.2\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.CHARS = config.get_resource(self._params()[\"dict\"], WordList)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The charset used for the decoder.\",\n req=False,\n default=\"cipheydists::list::base69\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"base69\"\n", "meta": {"hash_id": "31c6161231aff199013f2ac4f095123513a8864937653fcef44a1e65b46a7526"}}]}], "golden_chunks": [{"doc_uuid": "2da927c1c66089a8d0af2c7edd199977cc56933b1ba803439d7f2f7f7592f3a3", "index": 3, "content": " def chars_to_byte(self, s: str):\n return (69 * 
self.CHARS.index(s[1])) + (self.CHARS.index(s[0]))\n\n @staticmethod\n def priority() -> float:\n # If this becomes lower or equal to the reverse, it breaks.\n # So I'll set it to 0.2 for now since it is very fast anyways.\n return 0.2\n\n def __init__(self, config: Config):\n super().__init__(config)\n self.CHARS = config.get_resource(self._params()[\"dict\"], WordList)\n\n @staticmethod\n def getParams() -> Optional[Dict[str, ParamSpec]]:\n return {\n \"dict\": ParamSpec(\n desc=\"The charset used for the decoder.\",\n req=False,\n default=\"cipheydists::list::base69\",\n )\n }\n\n @staticmethod\n def getTarget() -> str:\n return \"base69\"\n", "meta": {"hash_id": "31c6161231aff199013f2ac4f095123513a8864937653fcef44a1e65b46a7526"}}], "meta": {"doctype": "codebase", "relative_path": "/ciphey/basemods/Decoders/base69.py", "repo_name": "Ciphey/Ciphey", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "What encryption schemes do the tests cover?", "answer": "The test functions cover decryption of messages encrypted with various schemes, including:\n- A1Z26 substitution cipher (test_a1z26)\n- Affine cipher (test_affine) \n- ASCII shift cipher (test_ascii_shift)\n- Atbash cipher (test_atbash)\n- Baconian cipher with complete and standard variants (test_baconian_complete_variant, test_baconian_standard_variant)\n- Base32 (test_base32)\n- Base58 with Bitcoin and Ripple alphabets (test_base58_bitcoin, test_base58_ripple)\n- Base62 (test_base62)\n- Base64 (test_base64)\n- Base85 (test_base85)\n- Base91 (test_base91)\n- Baudot code (test_baudot)\n- Binary ASCII (test_binary)\n- Braille (test_braille)\n- Brainfuck language (test_brainfuck)\n- Caesar cipher (test_caesar)\n- Decimal ASCII (test_decimal) \nand many more. Each test function contains an encrypted string and checks that decrypt decodes it to the expected plaintext.", "golden_doc_uuids": ["1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74"], "golden_chunk_uuids": [["1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", 0]], "golden_documents": [{"uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "content": "import pytest\n\nfrom ciphey import decrypt\nfrom ciphey.iface import Config\n\nanswer_str = \"Hello my name is bee and I like dog and apple and tree\"\n\n\ndef test_a1z26():\n res = decrypt(\n Config().library_default().complete_config(),\n \"8 5 12 12 15 13 25 14 1 13 5 9 19 2 5 5 1 14 4 9 12 9 11 5 4 15 7 1 14 4 1 16 16 12 5 1 14 4 20 18 5 5\",\n )\n assert res == \"hellomynameisbeeandilikedogandappleandtree\"\n\n\ndef test_affine():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Ihsst bf kxbh rd ghh xky R srjh ytz xky xccsh xky muhh\",\n )\n assert res == answer_str\n\n\ndef test_ascii_shift():\n res = decrypt(\n Config().library_default().complete_config(),\n '\"?FFIzGSzH;G?zCMzz#zFCE?z>IAz;H>z;JJF?z;H>zNL??',\n )\n assert res == answer_str\n\n\ndef test_atbash():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Svool nb mznv rh yvv zmw R orpv wlt zmw zkkov zmw givv\",\n )\n assert res == answer_str\n\n\ndef test_baconian_complete_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABB ABABB ABBBA ABBAA BBAAA ABBAB AAAAA ABBAA AABAA ABAAA BAABA AAAAB AABAA AABAA AAAAA ABBAB AAABB ABAAA ABABB ABAAA ABABA AABAA AAABB ABBBA AABBA AAAAA ABBAB AAABB AAAAA ABBBB ABBBB ABABB AABAA AAAAA ABBAB AAABB BAABB BAAAB AABAA AABAA\",\n )\n assert res == 
\"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n\ndef test_baconian_standard_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABA ABABA ABBAB ABABB BABBA ABBAA AAAAA ABABB AABAA ABAAA BAAAB AAAAB AABAA AABAA AAAAA ABBAA AAABB ABAAA ABABA ABAAA ABAAB AABAA AAABB ABBAB AABBA AAAAA ABBAA AAABB AAAAA ABBBA ABBBA ABABA AABAA AAAAA ABBAA AAABB BAABA BAAAA AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n\ndef test_base32():\n res = decrypt(\n Config().library_default().complete_config(),\n \"JBSWY3DPEBWXSIDOMFWWKIDJOMQGEZLFEBQW4ZBAJEQGY2LLMUQGI33HEBQW4ZBAMFYHA3DFEBQW4ZBAORZGKZI=\",\n )\n assert res == answer_str\n\n\ndef test_base58_bitcoin():\n res = decrypt(\n Config().library_default().complete_config(),\n \"6qYhNwsP46Mn4gy6gyANfsMm2icAxGFA6gnFjVm9phYHeby7PZm3vthiXxSU77teQgTFGbHETn\",\n )\n assert res == answer_str\n\n\ndef test_base58_ripple():\n res = decrypt(\n Config().library_default().complete_config(),\n \"aqY64A1PhaM8hgyagyw4C1Mmp5cwxGEwag8EjVm9F6YHebyfPZmsvt65XxS7ffteQgTEGbHNT8\",\n )\n assert res == answer_str\n\n\ndef test_base62():\n res = decrypt(\n Config().library_default().complete_config(),\n \"2mQvnz9Yevvb7DRCuyDltsP31vJLToR5pjE9orWkzHMUsht2kbC96PLbZ1sdIocsGHENrzC2n\",\n )\n assert res == answer_str\n\n\ndef test_base64():\n res = decrypt(\n Config().library_default().complete_config(),\n \"SGVsbG8gbXkgbmFtZSBpcyBiZWUgYW5kIEkgbGlrZSBkb2cgYW5kIGFwcGxlIGFuZCB0cmVl\",\n )\n\n assert res == answer_str\n\n\ndef test_base69():\n res = decrypt(\n Config().library_default().complete_config(),\n \"kAZAtABBeB8A-AoB8ADBNAhBLA1AFBgA0AXBfBGATAVAFBgAwAWBHBu7ARt\",\n )\n assert res == answer_str\n\n\ndef test_base91():\n res = decrypt(\n Config().library_default().complete_config(),\n \">OwJh>=/fV@$x88j9ZNKB*ge$yV%lE%ZKi,+<]>-.-[+>-----<]>++.+++++++..+++.+[+>++<]>.[++>+<]>---.--[+++>-<]>.-[+>++++<]>.[++>+<]>--.-[+++>++<]>-.+[-->---<]>.--------.[+++++>+<]>+.-[+++>--<]>-.++++++++++.---[+>++<]>.[+++>-<]>++.+++..[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.-[+++>+<]>--.-[+>----<]>.++[+++>--<]>.---.++.------.[+++++>+<]>+.+[+>---<]>+.+++++++++++.--------.-[+++>-<]>--.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[+++>-<]>+.-[-->---<]>..----.-------.[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[++>+<]>++++.--.-------------..\",\n )\n assert res == answer_str\n\n\ndef test_brandon():\n res = decrypt(\n Config().library_default().complete_config(),\n \"R hvv blf tzgsvi yvuliv nv...sfmtib...gviirurvw... Xofgxsrmt blfi yzyvh gl blfi yivzhg. Vnkvili Vnsbi srh nzixsvw srh ovtrlmh rmgl lfi ozmwh... Ozrw hrvtv gl vevib uligivhh uiln sviv gl gsv Yofv Nlfmgzrmh. Izyrw zmw izevmlfh, sv yrgvh zmw yrgvh zdzb. Nvm lu gsv Mligs, blf hgzmw zg gsv kivxrkrxv. Blfi prmth szev uzrovw blf, hl mld blf gfim gl gsv tlwh! Zmw bvg blf wl mlg kovzw? Blf wl mlg pmvvo gl wfhg blfi svzwh drgs zhs? Rmhgvzw blf dzro, Dsb szev gsv tlwh ulihzpvm fh? Dv nfhg ollp rmgl gsv girzoh dv uzrovw olmt ztl! Rm z grnv kzhhvw, lfi dliow rmgvigdrmvw drgs zmlgsvi gsilfts zm fksvzezo hxslozih xzoo gsv Xlmqfmxgrlm lu gsv Hksvivh... Gsv tlwh zooldvw fmslob ulixvh gl hork rmgl lfi wlnzrm. Gsv luuhkirmt lu gszg xzgzxobhn dzh gsv mvuvirlfh ulixv xzoovw nztrx... Bvg dv wrw mlg yzmrhs rg, rmhgvzw hgfwbrmt gsv erov zixzmv uli lfi kldvi zmw dvzogs! Zmw gsv nlmhgvih zg lfi wlli...gsv fmslob ivorxgh lu gsrh Xlmqfmxgrlm? ...gsv gilooh...gsv xlikhv vzgvih...gsv dvivdloevh? Wrw dv izrhv lfi hdliwh ztzrmhg gsvn? 
Li szev dv ozrw gsrh yfiwvm lm lgsvih? Lm hl-xzoovw drgxsvih? Hgizb xsrowivm gzftsg gsv dzbh lu ulfo hlixvib, gsvri ylwrvh nfgzgvw gsilfts yozhksvnlfh irgfzo. Hvmg gl urtsg nlmhgvih gslfts gsvb xlfow mlg wrhgrmtfrhs tllw uiln vero. Gsv uorxpvi lu sfnzmrgb olmt vcgrmtfrhsvw drgsrm gsvn. Bvh, gsvri mfnyvih szev wdrmwovw gsilfts gsv bvzih. Yfg z uvd hgroo ilzn lfi ozmwh, luuvirmt gsvri yollwb dlip uli xlrm. Gl gsrh wzb gsvb hsznv fh drgs gsvri evib vcrhgvmxv! Gsv Mligs yovvwh, uolttvw yb dzi. Gsv yzggovh ziv gsv tlwh' dsrk, xszhgrhvnvmg uli lfi hrmh! Zmw ovg fh mlg ulitvg gsv gviilih, gsv hxlfitvh uiln yvblmw lfi dliow! Gsv Drow Sfmg irwvh gsv hpb drgs vevib ufoo nllm! Gsv wzip izrwvih zywfxg lfi xsrowivm rmgl ozmwh fmpmldm! Hlnv hzb gsvb svizow z hvxlmw Xlmqfmxgrlm! Xzm dv xszig z xlfihv yzxp rmgl gsv ortsg? Droo dv urmw gsv hgivmtgs gl yzmrhs gsv nztvh uiln lfi prmtwlnh? Fmrgv zilfmw gsv dzings lu gsv Vgvimzo Uriv? Mrts rh gsv Grnv lu gsv Hdliw zmw gsv Zcv! Mlmv droo urtsg gsrh dzi rm lfi hgvzw! Mrts rh gsv Grnv lu Nzwmvhh zmw Wrhwzrm!\",\n )\n assert bool(res) is True\n\n\ndef test_caesar():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Uryyb zl anzr vf orr naq V yvxr qbt naq nccyr naq gerr\",\n )\n assert res == answer_str\n\n\ndef test_decimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"72 101 108 108 111 32 109 121 32 110 97 109 101 32 105 115 32 98 101 101 32 97 110 100 32 73 32 108 105 107 101 32 100 111 103 32 97 110 100 32 97 112 112 108 101 32 97 110 100 32 116 114 101 101\",\n )\n assert res == answer_str\n\n\ndef test_dna():\n res = decrypt(\n Config().library_default().complete_config(),\n \"GAT AAT GCT ATT TCT ATT AAT ACT GAA CGT GAA TCT ACT ATT AAT GGT\",\n )\n assert res == \"DNAISINTERESTING\"\n\n\ndef test_dtmf():\n res = decrypt(\n Config().library_default().complete_config(),\n \"1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 
1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697\",\n )\n assert res == answer_str\n\n\ndef test_galactic():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᒷꖎꖎ𝙹 ᒲ|| リᔑᒲᒷ ╎ᓭ ʖᒷᒷ ᔑリ↸ i ꖎ╎ꖌᒷ ↸𝙹⊣ ᔑリ↸ ᔑ!¡!¡ꖎᒷ ᔑリ↸ ℸ ̣ ∷ᒷᒷ\",\n )\n assert res == answer_str.lower()\n\n\ndef test_galactic_Xproblem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᔑꖎ╎⎓ᔑ ̇/, ̇/||ꖎ𝙹!¡⍑𝙹リᒷ, ᔑ ̇/ ᔑꖎ𝙹リᒷ ᔑリ↸ ̇/ᒷ∷𝙹 ̇/ ⎓∷𝙹ᒲ 𝙹 ̇/⎓𝙹∷↸\",\n )\n assert res == \"halifax, xylophone, a x alone and xerox from oxford\"\n\n\ndef test_gzip():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H4sIAAzul18A/yXJzQmAMBSEwVa+ckwZT7LIw80P6sXuA3ocZpM9aC89msibXSJ6peA8RR3Hx5jTfzyXtAAbQvCyNgAAAA==\",\n )\n assert res == answer_str\n\n\ndef test_hexadecimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"48 65 6c 6c 6f 20 6d 79 20 6e 61 6d 65 20 69 73 20 62 65 65 20 61 6e 64 20 49 20 6c 69 6b 65 20 64 6f 67 20 61 6e 64 20 61 70 70 6c 65 20 61 6e 64 20 74 72 65 65\",\n )\n\n assert res == answer_str\n\n\ndef test_json_problem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"0110100001100101011011000110110001101111\",\n )\n assert res != \"0110100001100101011011000110110001101111\"\n\n\ndef test_leetspeak():\n res = decrypt(\n Config().library_default().complete_config(),\n \"|-|3ll0 my n4m3 1s 833 4nd 1 l1k3 D06 
4ND 4ppl3 4nd 7R33\",\n )\n assert res.lower() == answer_str.lower()\n\n\ndef test_morse_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \".... . .-.. .-.. ---/-- -.--/-. .- -- ./.. .../-... . ./.- -. -../../.-.. .. -.- ./-.. --- --./.- -. -../.- .--. .--. .-.. ./.- -. -../- .-. . .\",\n )\n assert res == answer_str.upper()\n\n\ndef test_multi_tap():\n res = decrypt(\n Config().library_default().complete_config(),\n \"44 33 555 555 666 0 6 999 0 66 2 6 33 0 444 7777 0 22 33 33 0 2 66 3 0 444 0 555 444 55 33 0 3 666 4 0 2 66 3 0 2 7 7 555 33 0 2 66 3 0 8 777 33 33\",\n )\n assert res == answer_str.upper()\n\n\ndef test_new_line_at_start_returns():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"\\npass\\n\")\n\n assert res == \"\\npass\\n\"\n\n\ndef test_new_line_strip_and_return():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"pass\\n\")\n\n assert res == \"pass\\n\"\n\n\ndef test_octal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"110 145 154 154 157 40 155 171 40 156 141 155 145 40 151 163 40 142 145 145 40 141 156 144 40 111 40 154 151 153 145 40 144 157 147 40 141 156 144 40 141 160 160 154 145 40 141 156 144 40 164 162 145 145\",\n )\n assert res == answer_str\n\n\ndef test_plaintext():\n res = decrypt(Config().library_default().complete_config(), answer_str)\n assert res == answer_str\n\n\ndef test_quadgrams_messed_up_spacing():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\",\n )\n assert (\n res == \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\"\n )\n\n\ndef test_quadgrams_no_spaces():\n res = decrypt(\n Config().library_default().complete_config(),\n \"HellomynameisbeeandIlikedogandappleandtree\",\n )\n assert res == \"HellomynameisbeeandIlikedogandappleandtree\"\n\n\ndef test_quadgrams_space_between_every_letter():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\",\n )\n assert (\n res\n == \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\"\n )\n\n\ndef test_reversed_text():\n res = decrypt(\n Config().library_default().complete_config(),\n \"eert dna elppa dna god ekil I dna eeb si eman ym olleH\",\n )\n assert res == answer_str\n\n\ndef test_rot47():\n res = decrypt(\n Config().library_default().complete_config(),\n \"$A9:?I @7 3=24< BF2CEK[ ;F586 >J G@H\",\n )\n assert res == \"Sphinx of black quartz, judge my vow\"\n\n\ndef test_soundex():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H236 I200 I500 T000 P230\",\n )\n assert res.lower() == \"history is in the past\"\n\n\ndef test_tap_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \"4,4 1,5 4,3 4,4 3,4 3,3 1,5 4,4 5,2 3,4 4,4 2,3 4,2 1,5 1,5\",\n )\n assert res == \"test one two three\".upper()\n\n\ndef test_url():\n res = decrypt(\n Config().library_default().complete_config(),\n \"https%3A%2F%2Fwww%2Egoogle%2Ecom%2Fsearch%3Fq%3Dciphey\",\n )\n assert res == \"https://www.google.com/search?q=ciphey\"\n\n\ndef test_uuencode():\n res = decrypt(\n 
Config().library_default().complete_config(),\n 'begin 644 /dev/stdout\\nM2&5L;&\\\\@;7D@;F%M92!Iz#zFCE?z>IAz;H>z;JJF?z;H>zNL??',\n )\n assert res == answer_str\n\n\ndef test_atbash():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Svool nb mznv rh yvv zmw R orpv wlt zmw zkkov zmw givv\",\n )\n assert res == answer_str\n\n", "meta": {"hash_id": "1f8550d707a0a86e4361f02deb8a15f6a07ce1940cb96b9f82d8741ee50503b4"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 2, "content": "\ndef test_baconian_complete_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABB ABABB ABBBA ABBAA BBAAA ABBAB AAAAA ABBAA AABAA ABAAA BAABA AAAAB AABAA AABAA AAAAA ABBAB AAABB ABAAA ABABB ABAAA ABABA AABAA AAABB ABBBA AABBA AAAAA ABBAB AAABB AAAAA ABBBB ABBBB ABABB AABAA AAAAA ABBAB AAABB BAABB BAAAB AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n", "meta": {"hash_id": "fd04cb2b16b26138bc0a89db0d1d3860b8b25a4266168c485adc0a10756ddafc"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 3, "content": "\ndef test_baconian_standard_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABA ABABA ABBAB ABABB BABBA ABBAA AAAAA ABABB AABAA ABAAA BAAAB AAAAB AABAA AABAA AAAAA ABBAA AAABB ABAAA ABABA ABAAA ABAAB AABAA AAABB ABBAB AABBA AAAAA ABBAA AAABB AAAAA ABBBA ABBBA ABABA AABAA AAAAA ABBAA AAABB BAABA BAAAA AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n", "meta": {"hash_id": "b825c941fc4f386788f190214eacb66d67998ce1009c5c294749efa95dd5882a"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 4, "content": "\ndef test_base32():\n res = decrypt(\n Config().library_default().complete_config(),\n \"JBSWY3DPEBWXSIDOMFWWKIDJOMQGEZLFEBQW4ZBAJEQGY2LLMUQGI33HEBQW4ZBAMFYHA3DFEBQW4ZBAORZGKZI=\",\n )\n assert res == answer_str\n\n\ndef test_base58_bitcoin():\n res = decrypt(\n Config().library_default().complete_config(),\n \"6qYhNwsP46Mn4gy6gyANfsMm2icAxGFA6gnFjVm9phYHeby7PZm3vthiXxSU77teQgTFGbHETn\",\n )\n assert res == answer_str\n\n\ndef test_base58_ripple():\n res = decrypt(\n Config().library_default().complete_config(),\n \"aqY64A1PhaM8hgyagyw4C1Mmp5cwxGEwag8EjVm9F6YHebyfPZmsvt65XxS7ffteQgTEGbHNT8\",\n )\n assert res == answer_str\n\n", "meta": {"hash_id": "84e34cedeb13d2d617637d1a58edc2a7b91d39dd48ae39438c25352f20cbe223"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 5, "content": "\ndef test_base62():\n res = decrypt(\n Config().library_default().complete_config(),\n \"2mQvnz9Yevvb7DRCuyDltsP31vJLToR5pjE9orWkzHMUsht2kbC96PLbZ1sdIocsGHENrzC2n\",\n )\n assert res == answer_str\n\n\ndef test_base64():\n res = decrypt(\n Config().library_default().complete_config(),\n \"SGVsbG8gbXkgbmFtZSBpcyBiZWUgYW5kIEkgbGlrZSBkb2cgYW5kIGFwcGxlIGFuZCB0cmVl\",\n )\n\n assert res == answer_str\n\n", "meta": {"hash_id": "fbbfc9e9bc031e3858273dd7b457a729f6ddcd25b2a3eafcb66d43893638202b"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 6, "content": "\ndef test_base69():\n res = decrypt(\n Config().library_default().complete_config(),\n \"kAZAtABBeB8A-AoB8ADBNAhBLA1AFBgA0AXBfBGATAVAFBgAwAWBHBu7ARt\",\n )\n assert res == answer_str\n\n\ndef test_base91():\n res = decrypt(\n Config().library_default().complete_config(),\n 
\">OwJh>=/fV@$x88j9ZNKB*ge$yV%lE%ZKi,+<]>-.-[+>-----<]>++.+++++++..+++.+[+>++<]>.[++>+<]>---.--[+++>-<]>.-[+>++++<]>.[++>+<]>--.-[+++>++<]>-.+[-->---<]>.--------.[+++++>+<]>+.-[+++>--<]>-.++++++++++.---[+>++<]>.[+++>-<]>++.+++..[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.-[+++>+<]>--.-[+>----<]>.++[+++>--<]>.---.++.------.[+++++>+<]>+.+[+>---<]>+.+++++++++++.--------.-[+++>-<]>--.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[+++>-<]>+.-[-->---<]>..----.-------.[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[++>+<]>++++.--.-------------..\",\n )\n assert res == answer_str\n\n", "meta": {"hash_id": "1ff2b5c2e2a20f773144c3456da8353be8d9fd0f8441b5066913aabb1610ef02"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 11, "content": "\ndef test_brandon():\n res = decrypt(\n Config().library_default().complete_config(),\n \"R hvv blf tzgsvi yvuliv nv...sfmtib...gviirurvw... Xofgxsrmt blfi yzyvh gl blfi yivzhg. Vnkvili Vnsbi srh nzixsvw srh ovtrlmh rmgl lfi ozmwh... Ozrw hrvtv gl vevib uligivhh uiln sviv gl gsv Yofv Nlfmgzrmh. Izyrw zmw izevmlfh, sv yrgvh zmw yrgvh zdzb. Nvm lu gsv Mligs, blf hgzmw zg gsv kivxrkrxv. Blfi prmth szev uzrovw blf, hl mld blf gfim gl gsv tlwh! Zmw bvg blf wl mlg kovzw? Blf wl mlg pmvvo gl wfhg blfi svzwh drgs zhs? Rmhgvzw blf dzro, Dsb szev gsv tlwh ulihzpvm fh? Dv nfhg ollp rmgl gsv girzoh dv uzrovw olmt ztl! Rm z grnv kzhhvw, ", "meta": {"hash_id": "d99f2f3a5d6da7b61c4e7ddb52f2af7301f6cd44e42e0d7baa4e0da6ea7aac7c"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 12, "content": "lfi dliow rmgvigdrmvw drgs zmlgsvi gsilfts zm fksvzezo hxslozih xzoo gsv Xlmqfmxgrlm lu gsv Hksvivh... Gsv tlwh zooldvw fmslob ulixvh gl hork rmgl lfi wlnzrm. Gsv luuhkirmt lu gszg xzgzxobhn dzh gsv mvuvirlfh ulixv xzoovw nztrx... Bvg dv wrw mlg yzmrhs rg, rmhgvzw hgfwbrmt gsv erov zixzmv uli lfi kldvi zmw dvzogs! Zmw gsv nlmhgvih zg lfi wlli...gsv fmslob ivorxgh lu gsrh Xlmqfmxgrlm? ...gsv gilooh...gsv xlikhv vzgvih...gsv dvivdloevh? Wrw dv izrhv lfi hdliwh ztzrmhg gsvn? Li szev dv ozrw gsrh yfiwvm lm lgsvih? Lm hl-xzoovw drgxsvih? Hgizb xsrowivm gzftsg gsv dzbh lu ulfo hlixvib, ", "meta": {"hash_id": "0e9c5866b41c9e68a3c490df193a6662e4e89fddeab852562cf032fa102e0b50"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 13, "content": "gsvri ylwrvh nfgzgvw gsilfts yozhksvnlfh irgfzo. Hvmg gl urtsg nlmhgvih gslfts gsvb xlfow mlg wrhgrmtfrhs tllw uiln vero. Gsv uorxpvi lu sfnzmrgb olmt vcgrmtfrhsvw drgsrm gsvn. Bvh, gsvri mfnyvih szev wdrmwovw gsilfts gsv bvzih. Yfg z uvd hgroo ilzn lfi ozmwh, luuvirmt gsvri yollwb dlip uli xlrm. Gl gsrh wzb gsvb hsznv fh drgs gsvri evib vcrhgvmxv! Gsv Mligs yovvwh, uolttvw yb dzi. Gsv yzggovh ziv gsv tlwh' dsrk, xszhgrhvnvmg uli lfi hrmh! Zmw ovg fh mlg ulitvg gsv gviilih, gsv hxlfitvh uiln yvblmw lfi dliow! Gsv Drow Sfmg irwvh gsv hpb drgs vevib ufoo nllm! Gsv wzip izrwvih zywfxg lfi xsrowivm rmgl ozmwh fmpmldm! Hlnv hzb gsvb svizow z hvxlmw Xlmqfmxgrlm! Xzm dv xszig z xlfihv yzxp rmgl gsv ortsg? Droo dv urmw gsv hgivmtgs gl yzmrhs gsv nztvh uiln lfi prmtwlnh? Fmrgv zilfmw gsv dzings lu gsv Vgvimzo Uriv? Mrts rh gsv Grnv lu gsv Hdliw zmw gsv Zcv! Mlmv droo urtsg gsrh dzi rm lfi hgvzw! 
Mrts rh gsv Grnv lu Nzwmvhh zmw Wrhwzrm!\",\n )\n assert bool(res) is True\n\n", "meta": {"hash_id": "3f20ee1819451d2ab70a8d644bb2e25b219bfe05d1cf82d1e34cfc824720a664"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 14, "content": "\ndef test_caesar():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Uryyb zl anzr vf orr naq V yvxr qbt naq nccyr naq gerr\",\n )\n assert res == answer_str\n\n\ndef test_decimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"72 101 108 108 111 32 109 121 32 110 97 109 101 32 105 115 32 98 101 101 32 97 110 100 32 73 32 108 105 107 101 32 100 111 103 32 97 110 100 32 97 112 112 108 101 32 97 110 100 32 116 114 101 101\",\n )\n assert res == answer_str\n\n\ndef test_dna():\n res = decrypt(\n Config().library_default().complete_config(),\n \"GAT AAT GCT ATT TCT ATT AAT ACT GAA CGT GAA TCT ACT ATT AAT GGT\",\n )\n assert res == \"DNAISINTERESTING\"\n\n\ndef test_dtmf():\n res = decrypt(\n Config().library_default().complete_config(),\n", "meta": {"hash_id": "d3afa2e2a048120a8ff1d44e08029ff850020d0bbb8dc1d57e7311e07c7c9589"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 15, "content": " \"1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 
1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697\",", "meta": {"hash_id": "9613ea129232a5edb1513e129ec95d2f9e1527b6ac7b181f3d71d58a64ff72e5"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 16, "content": "\n )\n assert res == answer_str\n\n\ndef test_galactic():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᒷꖎꖎ𝙹 ᒲ|| リᔑᒲᒷ ╎ᓭ ʖᒷᒷ ᔑリ↸ i ꖎ╎ꖌᒷ ↸𝙹⊣ ᔑリ↸ ᔑ!¡!¡ꖎᒷ ᔑリ↸ ℸ ̣ ∷ᒷᒷ\",\n )\n assert res == answer_str.lower()\n\n\ndef test_galactic_Xproblem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᔑꖎ╎⎓ᔑ ̇/, ̇/||ꖎ𝙹!¡⍑𝙹リᒷ, ᔑ ̇/ ᔑꖎ𝙹リᒷ ᔑリ↸ ̇/ᒷ∷𝙹 ̇/ ⎓∷𝙹ᒲ 𝙹 ̇/⎓𝙹∷↸\",\n )\n assert res == \"halifax, xylophone, a x alone and xerox from oxford\"\n\n\ndef test_gzip():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H4sIAAzul18A/yXJzQmAMBSEwVa+ckwZT7LIw80P6sXuA3ocZpM9aC89msibXSJ6peA8RR3Hx5jTfzyXtAAbQvCyNgAAAA==\",\n )\n assert res == answer_str\n\n", "meta": {"hash_id": "6cdd3ce4c4e36cc1dac31944ccf863b914115b7361b9e31cf1c597b9cca20500"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 17, "content": "\ndef test_hexadecimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"48 65 6c 6c 6f 20 6d 79 20 6e 61 6d 65 20 69 73 20 62 65 65 20 61 6e 64 20 49 20 6c 69 6b 65 20 64 6f 67 20 61 6e 64 20 61 70 70 6c 65 20 61 6e 64 20 74 72 65 65\",\n )\n\n assert res == answer_str\n\n\ndef test_json_problem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"0110100001100101011011000110110001101111\",\n )\n assert res != \"0110100001100101011011000110110001101111\"\n\n\ndef test_leetspeak():\n res = decrypt(\n Config().library_default().complete_config(),\n \"|-|3ll0 my n4m3 1s 833 4nd 1 l1k3 D06 4ND 4ppl3 4nd 7R33\",\n )\n assert res.lower() == answer_str.lower()\n\n", "meta": {"hash_id": "63befd229db5b9d262f2cf69d22b40e5f8f3f23bbf459e72b842d59dd27bfaab"}}, {"doc_uuid": 
"1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 18, "content": "\ndef test_morse_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \".... . .-.. .-.. ---/-- -.--/-. .- -- ./.. .../-... . ./.- -. -../../.-.. .. -.- ./-.. --- --./.- -. -../.- .--. .--. .-.. ./.- -. -../- .-. . .\",\n )\n assert res == answer_str.upper()\n\n\ndef test_multi_tap():\n res = decrypt(\n Config().library_default().complete_config(),\n \"44 33 555 555 666 0 6 999 0 66 2 6 33 0 444 7777 0 22 33 33 0 2 66 3 0 444 0 555 444 55 33 0 3 666 4 0 2 66 3 0 2 7 7 555 33 0 2 66 3 0 8 777 33 33\",\n )\n assert res == answer_str.upper()\n\n", "meta": {"hash_id": "4c103a60081c345b58832a782bd666d648eeca9e3c0215fb0defd9e210ddf29d"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 19, "content": "\ndef test_new_line_at_start_returns():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"\\npass\\n\")\n\n assert res == \"\\npass\\n\"\n\n\ndef test_new_line_strip_and_return():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"pass\\n\")\n\n assert res == \"pass\\n\"\n\n\ndef test_octal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"110 145 154 154 157 40 155 171 40 156 141 155 145 40 151 163 40 142 145 145 40 141 156 144 40 111 40 154 151 153 145 40 144 157 147 40 141 156 144 40 141 160 160 154 145 40 141 156 144 40 164 162 145 145\",\n )\n assert res == answer_str\n\n", "meta": {"hash_id": "cd052a53653437d356a4c4db122c3c7007cca052eb180cf88fd42252fcc384a2"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 20, "content": "\ndef test_plaintext():\n res = decrypt(Config().library_default().complete_config(), answer_str)\n assert res == answer_str\n\n\ndef test_quadgrams_messed_up_spacing():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\",\n )\n assert (\n res == \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\"\n )\n\n\ndef test_quadgrams_no_spaces():\n res = decrypt(\n Config().library_default().complete_config(),\n \"HellomynameisbeeandIlikedogandappleandtree\",\n )\n assert res == \"HellomynameisbeeandIlikedogandappleandtree\"\n\n", "meta": {"hash_id": "1ce11d5b5cebb2901613b2d18f30c0d0a0de188b60001f040faeaf152c3554aa"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 21, "content": "\ndef test_quadgrams_space_between_every_letter():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\",\n )\n assert (\n res\n == \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\"\n )\n\n\ndef test_reversed_text():\n res = decrypt(\n Config().library_default().complete_config(),\n \"eert dna elppa dna god ekil I dna eeb si eman ym olleH\",\n )\n assert res == answer_str\n\n", "meta": {"hash_id": "38ac79d6b134049d0acb0397198da8bdb7e20ef1f58b445c314255053d890598"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 22, "content": "\ndef test_rot47():\n res 
= decrypt(\n Config().library_default().complete_config(),\n \"$A9:?I @7 3=24< BF2CEK[ ;F586 >J G@H\",\n )\n assert res == \"Sphinx of black quartz, judge my vow\"\n\n\ndef test_soundex():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H236 I200 I500 T000 P230\",\n )\n assert res.lower() == \"history is in the past\"\n\n\ndef test_tap_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \"4,4 1,5 4,3 4,4 3,4 3,3 1,5 4,4 5,2 3,4 4,4 2,3 4,2 1,5 1,5\",\n )\n assert res == \"test one two three\".upper()\n\n\ndef test_url():\n res = decrypt(\n Config().library_default().complete_config(),\n \"https%3A%2F%2Fwww%2Egoogle%2Ecom%2Fsearch%3Fq%3Dciphey\",\n )\n assert res == \"https://www.google.com/search?q=ciphey\"\n\n", "meta": {"hash_id": "4c5aae565dad234d98ce097f3559532f47c96dbb4e2cec02fbc58ae8784f6cde"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 23, "content": "\ndef test_uuencode():\n res = decrypt(\n Config().library_default().complete_config(),\n 'begin 644 /dev/stdout\\nM2&5L;&\\\\@;7D@;F%M92!Iz#zFCE?z>IAz;H>z;JJF?z;H>zNL??',\n )\n assert res == answer_str\n\n\ndef test_atbash():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Svool nb mznv rh yvv zmw R orpv wlt zmw zkkov zmw givv\",\n )\n assert res == answer_str\n\n\ndef test_baconian_complete_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABB ABABB ABBBA ABBAA BBAAA ABBAB AAAAA ABBAA AABAA ABAAA BAABA AAAAB AABAA AABAA AAAAA ABBAB AAABB ABAAA ABABB ABAAA ABABA AABAA AAABB ABBBA AABBA AAAAA ABBAB AAABB AAAAA ABBBB ABBBB ABABB AABAA AAAAA ABBAB AAABB BAABB BAAAB AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n\ndef test_baconian_standard_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABA ABABA ABBAB ABABB BABBA ABBAA AAAAA ABABB AABAA ABAAA BAAAB AAAAB AABAA AABAA AAAAA ABBAA AAABB ABAAA ABABA ABAAA ABAAB AABAA AAABB ABBAB AABBA AAAAA ABBAA AAABB AAAAA ABBBA ABBBA ABABA AABAA AAAAA ABBAA AAABB BAABA BAAAA AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n\ndef test_base32():\n res = decrypt(\n Config().library_default().complete_config(),\n \"JBSWY3DPEBWXSIDOMFWWKIDJOMQGEZLFEBQW4ZBAJEQGY2LLMUQGI33HEBQW4ZBAMFYHA3DFEBQW4ZBAORZGKZI=\",\n )\n assert res == answer_str\n\n\ndef test_base58_bitcoin():\n res = decrypt(\n Config().library_default().complete_config(),\n \"6qYhNwsP46Mn4gy6gyANfsMm2icAxGFA6gnFjVm9phYHeby7PZm3vthiXxSU77teQgTFGbHETn\",\n )\n assert res == answer_str\n\n\ndef test_base58_ripple():\n res = decrypt(\n Config().library_default().complete_config(),\n \"aqY64A1PhaM8hgyagyw4C1Mmp5cwxGEwag8EjVm9F6YHebyfPZmsvt65XxS7ffteQgTEGbHNT8\",\n )\n assert res == answer_str\n\n\ndef test_base62():\n res = decrypt(\n Config().library_default().complete_config(),\n \"2mQvnz9Yevvb7DRCuyDltsP31vJLToR5pjE9orWkzHMUsht2kbC96PLbZ1sdIocsGHENrzC2n\",\n )\n assert res == answer_str\n\n\ndef test_base64():\n res = decrypt(\n Config().library_default().complete_config(),\n \"SGVsbG8gbXkgbmFtZSBpcyBiZWUgYW5kIEkgbGlrZSBkb2cgYW5kIGFwcGxlIGFuZCB0cmVl\",\n )\n\n assert res == answer_str\n\n\ndef test_base69():\n res = decrypt(\n Config().library_default().complete_config(),\n \"kAZAtABBeB8A-AoB8ADBNAhBLA1AFBgA0AXBfBGATAVAFBgAwAWBHBu7ARt\",\n )\n assert res == answer_str\n\n\ndef test_base91():\n res = decrypt(\n Config().library_default().complete_config(),\n 
\">OwJh>=/fV@$x88j9ZNKB*ge$yV%lE%ZKi,+<]>-.-[+>-----<]>++.+++++++..+++.+[+>++<]>.[++>+<]>---.--[+++>-<]>.-[+>++++<]>.[++>+<]>--.-[+++>++<]>-.+[-->---<]>.--------.[+++++>+<]>+.-[+++>--<]>-.++++++++++.---[+>++<]>.[+++>-<]>++.+++..[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.-[+++>+<]>--.-[+>----<]>.++[+++>--<]>.---.++.------.[+++++>+<]>+.+[+>---<]>+.+++++++++++.--------.-[+++>-<]>--.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[+++>-<]>+.-[-->---<]>..----.-------.[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[++>+<]>++++.--.-------------..\",\n )\n assert res == answer_str\n\n\ndef test_brandon():\n res = decrypt(\n Config().library_default().complete_config(),\n \"R hvv blf tzgsvi yvuliv nv...sfmtib...gviirurvw... Xofgxsrmt blfi yzyvh gl blfi yivzhg. Vnkvili Vnsbi srh nzixsvw srh ovtrlmh rmgl lfi ozmwh... Ozrw hrvtv gl vevib uligivhh uiln sviv gl gsv Yofv Nlfmgzrmh. Izyrw zmw izevmlfh, sv yrgvh zmw yrgvh zdzb. Nvm lu gsv Mligs, blf hgzmw zg gsv kivxrkrxv. Blfi prmth szev uzrovw blf, hl mld blf gfim gl gsv tlwh! Zmw bvg blf wl mlg kovzw? Blf wl mlg pmvvo gl wfhg blfi svzwh drgs zhs? Rmhgvzw blf dzro, Dsb szev gsv tlwh ulihzpvm fh? Dv nfhg ollp rmgl gsv girzoh dv uzrovw olmt ztl! Rm z grnv kzhhvw, lfi dliow rmgvigdrmvw drgs zmlgsvi gsilfts zm fksvzezo hxslozih xzoo gsv Xlmqfmxgrlm lu gsv Hksvivh... Gsv tlwh zooldvw fmslob ulixvh gl hork rmgl lfi wlnzrm. Gsv luuhkirmt lu gszg xzgzxobhn dzh gsv mvuvirlfh ulixv xzoovw nztrx... Bvg dv wrw mlg yzmrhs rg, rmhgvzw hgfwbrmt gsv erov zixzmv uli lfi kldvi zmw dvzogs! Zmw gsv nlmhgvih zg lfi wlli...gsv fmslob ivorxgh lu gsrh Xlmqfmxgrlm? ...gsv gilooh...gsv xlikhv vzgvih...gsv dvivdloevh? Wrw dv izrhv lfi hdliwh ztzrmhg gsvn? Li szev dv ozrw gsrh yfiwvm lm lgsvih? Lm hl-xzoovw drgxsvih? Hgizb xsrowivm gzftsg gsv dzbh lu ulfo hlixvib, gsvri ylwrvh nfgzgvw gsilfts yozhksvnlfh irgfzo. Hvmg gl urtsg nlmhgvih gslfts gsvb xlfow mlg wrhgrmtfrhs tllw uiln vero. Gsv uorxpvi lu sfnzmrgb olmt vcgrmtfrhsvw drgsrm gsvn. Bvh, gsvri mfnyvih szev wdrmwovw gsilfts gsv bvzih. Yfg z uvd hgroo ilzn lfi ozmwh, luuvirmt gsvri yollwb dlip uli xlrm. Gl gsrh wzb gsvb hsznv fh drgs gsvri evib vcrhgvmxv! Gsv Mligs yovvwh, uolttvw yb dzi. Gsv yzggovh ziv gsv tlwh' dsrk, xszhgrhvnvmg uli lfi hrmh! Zmw ovg fh mlg ulitvg gsv gviilih, gsv hxlfitvh uiln yvblmw lfi dliow! Gsv Drow Sfmg irwvh gsv hpb drgs vevib ufoo nllm! Gsv wzip izrwvih zywfxg lfi xsrowivm rmgl ozmwh fmpmldm! Hlnv hzb gsvb svizow z hvxlmw Xlmqfmxgrlm! Xzm dv xszig z xlfihv yzxp rmgl gsv ortsg? Droo dv urmw gsv hgivmtgs gl yzmrhs gsv nztvh uiln lfi prmtwlnh? Fmrgv zilfmw gsv dzings lu gsv Vgvimzo Uriv? Mrts rh gsv Grnv lu gsv Hdliw zmw gsv Zcv! Mlmv droo urtsg gsrh dzi rm lfi hgvzw! 
Mrts rh gsv Grnv lu Nzwmvhh zmw Wrhwzrm!\",\n )\n assert bool(res) is True\n\n\ndef test_caesar():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Uryyb zl anzr vf orr naq V yvxr qbt naq nccyr naq gerr\",\n )\n assert res == answer_str\n\n\ndef test_decimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"72 101 108 108 111 32 109 121 32 110 97 109 101 32 105 115 32 98 101 101 32 97 110 100 32 73 32 108 105 107 101 32 100 111 103 32 97 110 100 32 97 112 112 108 101 32 97 110 100 32 116 114 101 101\",\n )\n assert res == answer_str\n\n\ndef test_dna():\n res = decrypt(\n Config().library_default().complete_config(),\n \"GAT AAT GCT ATT TCT ATT AAT ACT GAA CGT GAA TCT ACT ATT AAT GGT\",\n )\n assert res == \"DNAISINTERESTING\"\n\n\ndef test_dtmf():\n res = decrypt(\n Config().library_default().complete_config(),\n \"1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 
1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697\",\n )\n assert res == answer_str\n\n\ndef test_galactic():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᒷꖎꖎ𝙹 ᒲ|| リᔑᒲᒷ ╎ᓭ ʖᒷᒷ ᔑリ↸ i ꖎ╎ꖌᒷ ↸𝙹⊣ ᔑリ↸ ᔑ!¡!¡ꖎᒷ ᔑリ↸ ℸ ̣ ∷ᒷᒷ\",\n )\n assert res == answer_str.lower()\n\n\ndef test_galactic_Xproblem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᔑꖎ╎⎓ᔑ ̇/, ̇/||ꖎ𝙹!¡⍑𝙹リᒷ, ᔑ ̇/ ᔑꖎ𝙹リᒷ ᔑリ↸ ̇/ᒷ∷𝙹 ̇/ ⎓∷𝙹ᒲ 𝙹 ̇/⎓𝙹∷↸\",\n )\n assert res == \"halifax, xylophone, a x alone and xerox from oxford\"\n\n\ndef test_gzip():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H4sIAAzul18A/yXJzQmAMBSEwVa+ckwZT7LIw80P6sXuA3ocZpM9aC89msibXSJ6peA8RR3Hx5jTfzyXtAAbQvCyNgAAAA==\",\n )\n assert res == answer_str\n\n\ndef test_hexadecimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"48 65 6c 6c 6f 20 6d 79 20 6e 61 6d 65 20 69 73 20 62 65 65 20 61 6e 64 20 49 20 6c 69 6b 65 20 64 6f 67 20 61 6e 64 20 61 70 70 6c 65 20 61 6e 64 20 74 72 65 65\",\n )\n\n assert res == answer_str\n\n\ndef test_json_problem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"0110100001100101011011000110110001101111\",\n )\n assert res != \"0110100001100101011011000110110001101111\"\n\n\ndef test_leetspeak():\n res = decrypt(\n Config().library_default().complete_config(),\n \"|-|3ll0 my n4m3 1s 833 4nd 1 l1k3 D06 4ND 4ppl3 4nd 7R33\",\n )\n assert res.lower() == answer_str.lower()\n\n\ndef test_morse_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \".... . .-.. .-.. ---/-- -.--/-. .- -- ./.. .../-... . ./.- -. -../../.-.. .. -.- ./-.. --- --./.- -. -../.- .--. .--. .-.. ./.- -. -../- .-. . 
.\",\n )\n assert res == answer_str.upper()\n\n\ndef test_multi_tap():\n res = decrypt(\n Config().library_default().complete_config(),\n \"44 33 555 555 666 0 6 999 0 66 2 6 33 0 444 7777 0 22 33 33 0 2 66 3 0 444 0 555 444 55 33 0 3 666 4 0 2 66 3 0 2 7 7 555 33 0 2 66 3 0 8 777 33 33\",\n )\n assert res == answer_str.upper()\n\n\ndef test_new_line_at_start_returns():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"\\npass\\n\")\n\n assert res == \"\\npass\\n\"\n\n\ndef test_new_line_strip_and_return():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"pass\\n\")\n\n assert res == \"pass\\n\"\n\n\ndef test_octal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"110 145 154 154 157 40 155 171 40 156 141 155 145 40 151 163 40 142 145 145 40 141 156 144 40 111 40 154 151 153 145 40 144 157 147 40 141 156 144 40 141 160 160 154 145 40 141 156 144 40 164 162 145 145\",\n )\n assert res == answer_str\n\n\ndef test_plaintext():\n res = decrypt(Config().library_default().complete_config(), answer_str)\n assert res == answer_str\n\n\ndef test_quadgrams_messed_up_spacing():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\",\n )\n assert (\n res == \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\"\n )\n\n\ndef test_quadgrams_no_spaces():\n res = decrypt(\n Config().library_default().complete_config(),\n \"HellomynameisbeeandIlikedogandappleandtree\",\n )\n assert res == \"HellomynameisbeeandIlikedogandappleandtree\"\n\n\ndef test_quadgrams_space_between_every_letter():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\",\n )\n assert (\n res\n == \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\"\n )\n\n\ndef test_reversed_text():\n res = decrypt(\n Config().library_default().complete_config(),\n \"eert dna elppa dna god ekil I dna eeb si eman ym olleH\",\n )\n assert res == answer_str\n\n\ndef test_rot47():\n res = decrypt(\n Config().library_default().complete_config(),\n \"$A9:?I @7 3=24< BF2CEK[ ;F586 >J G@H\",\n )\n assert res == \"Sphinx of black quartz, judge my vow\"\n\n\ndef test_soundex():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H236 I200 I500 T000 P230\",\n )\n assert res.lower() == \"history is in the past\"\n\n\ndef test_tap_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \"4,4 1,5 4,3 4,4 3,4 3,3 1,5 4,4 5,2 3,4 4,4 2,3 4,2 1,5 1,5\",\n )\n assert res == \"test one two three\".upper()\n\n\ndef test_url():\n res = decrypt(\n Config().library_default().complete_config(),\n \"https%3A%2F%2Fwww%2Egoogle%2Ecom%2Fsearch%3Fq%3Dciphey\",\n )\n assert res == \"https://www.google.com/search?q=ciphey\"\n\n\ndef test_uuencode():\n res = decrypt(\n Config().library_default().complete_config(),\n 'begin 644 /dev/stdout\\nM2&5L;&\\\\@;7D@;F%M92!Iz#zFCE?z>IAz;H>z;JJF?z;H>zNL??',\n )\n assert res == answer_str\n\n\ndef test_atbash():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Svool nb mznv rh yvv zmw R orpv wlt zmw zkkov zmw givv\",\n )\n 
assert res == answer_str\n\n", "meta": {"hash_id": "1f8550d707a0a86e4361f02deb8a15f6a07ce1940cb96b9f82d8741ee50503b4"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 2, "content": "\ndef test_baconian_complete_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABB ABABB ABBBA ABBAA BBAAA ABBAB AAAAA ABBAA AABAA ABAAA BAABA AAAAB AABAA AABAA AAAAA ABBAB AAABB ABAAA ABABB ABAAA ABABA AABAA AAABB ABBBA AABBA AAAAA ABBAB AAABB AAAAA ABBBB ABBBB ABABB AABAA AAAAA ABBAB AAABB BAABB BAAAB AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n", "meta": {"hash_id": "fd04cb2b16b26138bc0a89db0d1d3860b8b25a4266168c485adc0a10756ddafc"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 3, "content": "\ndef test_baconian_standard_variant():\n res = decrypt(\n Config().library_default().complete_config(),\n \"AABBB AABAA ABABA ABABA ABBAB ABABB BABBA ABBAA AAAAA ABABB AABAA ABAAA BAAAB AAAAB AABAA AABAA AAAAA ABBAA AAABB ABAAA ABABA ABAAA ABAAB AABAA AAABB ABBAB AABBA AAAAA ABBAA AAABB AAAAA ABBBA ABBBA ABABA AABAA AAAAA ABBAA AAABB BAABA BAAAA AABAA AABAA\",\n )\n assert res == \"HELLOMYNAMEISBEEANDILIKEDOGANDAPPLEANDTREE\"\n\n", "meta": {"hash_id": "b825c941fc4f386788f190214eacb66d67998ce1009c5c294749efa95dd5882a"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 4, "content": "\ndef test_base32():\n res = decrypt(\n Config().library_default().complete_config(),\n \"JBSWY3DPEBWXSIDOMFWWKIDJOMQGEZLFEBQW4ZBAJEQGY2LLMUQGI33HEBQW4ZBAMFYHA3DFEBQW4ZBAORZGKZI=\",\n )\n assert res == answer_str\n\n\ndef test_base58_bitcoin():\n res = decrypt(\n Config().library_default().complete_config(),\n \"6qYhNwsP46Mn4gy6gyANfsMm2icAxGFA6gnFjVm9phYHeby7PZm3vthiXxSU77teQgTFGbHETn\",\n )\n assert res == answer_str\n\n\ndef test_base58_ripple():\n res = decrypt(\n Config().library_default().complete_config(),\n \"aqY64A1PhaM8hgyagyw4C1Mmp5cwxGEwag8EjVm9F6YHebyfPZmsvt65XxS7ffteQgTEGbHNT8\",\n )\n assert res == answer_str\n\n", "meta": {"hash_id": "84e34cedeb13d2d617637d1a58edc2a7b91d39dd48ae39438c25352f20cbe223"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 5, "content": "\ndef test_base62():\n res = decrypt(\n Config().library_default().complete_config(),\n \"2mQvnz9Yevvb7DRCuyDltsP31vJLToR5pjE9orWkzHMUsht2kbC96PLbZ1sdIocsGHENrzC2n\",\n )\n assert res == answer_str\n\n\ndef test_base64():\n res = decrypt(\n Config().library_default().complete_config(),\n \"SGVsbG8gbXkgbmFtZSBpcyBiZWUgYW5kIEkgbGlrZSBkb2cgYW5kIGFwcGxlIGFuZCB0cmVl\",\n )\n\n assert res == answer_str\n\n", "meta": {"hash_id": "fbbfc9e9bc031e3858273dd7b457a729f6ddcd25b2a3eafcb66d43893638202b"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 6, "content": "\ndef test_base69():\n res = decrypt(\n Config().library_default().complete_config(),\n \"kAZAtABBeB8A-AoB8ADBNAhBLA1AFBgA0AXBfBGATAVAFBgAwAWBHBu7ARt\",\n )\n assert res == answer_str\n\n\ndef test_base91():\n res = decrypt(\n Config().library_default().complete_config(),\n 
\">OwJh>=/fV@$x88j9ZNKB*ge$yV%lE%ZKi,+<]>-.-[+>-----<]>++.+++++++..+++.+[+>++<]>.[++>+<]>---.--[+++>-<]>.-[+>++++<]>.[++>+<]>--.-[+++>++<]>-.+[-->---<]>.--------.[+++++>+<]>+.-[+++>--<]>-.++++++++++.---[+>++<]>.[+++>-<]>++.+++..[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.-[+++>+<]>--.-[+>----<]>.++[+++>--<]>.---.++.------.[+++++>+<]>+.+[+>---<]>+.+++++++++++.--------.-[+++>-<]>--.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[+++>-<]>+.-[-->---<]>..----.-------.[+++++>+<]>+.[+++>-<]>+.+[-->---<]>+.----------.-[+++>-<]>-.[++>+<]>++++.--.-------------..\",\n )\n assert res == answer_str\n\n", "meta": {"hash_id": "1ff2b5c2e2a20f773144c3456da8353be8d9fd0f8441b5066913aabb1610ef02"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 11, "content": "\ndef test_brandon():\n res = decrypt(\n Config().library_default().complete_config(),\n \"R hvv blf tzgsvi yvuliv nv...sfmtib...gviirurvw... Xofgxsrmt blfi yzyvh gl blfi yivzhg. Vnkvili Vnsbi srh nzixsvw srh ovtrlmh rmgl lfi ozmwh... Ozrw hrvtv gl vevib uligivhh uiln sviv gl gsv Yofv Nlfmgzrmh. Izyrw zmw izevmlfh, sv yrgvh zmw yrgvh zdzb. Nvm lu gsv Mligs, blf hgzmw zg gsv kivxrkrxv. Blfi prmth szev uzrovw blf, hl mld blf gfim gl gsv tlwh! Zmw bvg blf wl mlg kovzw? Blf wl mlg pmvvo gl wfhg blfi svzwh drgs zhs? Rmhgvzw blf dzro, Dsb szev gsv tlwh ulihzpvm fh? Dv nfhg ollp rmgl gsv girzoh dv uzrovw olmt ztl! Rm z grnv kzhhvw, ", "meta": {"hash_id": "d99f2f3a5d6da7b61c4e7ddb52f2af7301f6cd44e42e0d7baa4e0da6ea7aac7c"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 12, "content": "lfi dliow rmgvigdrmvw drgs zmlgsvi gsilfts zm fksvzezo hxslozih xzoo gsv Xlmqfmxgrlm lu gsv Hksvivh... Gsv tlwh zooldvw fmslob ulixvh gl hork rmgl lfi wlnzrm. Gsv luuhkirmt lu gszg xzgzxobhn dzh gsv mvuvirlfh ulixv xzoovw nztrx... Bvg dv wrw mlg yzmrhs rg, rmhgvzw hgfwbrmt gsv erov zixzmv uli lfi kldvi zmw dvzogs! Zmw gsv nlmhgvih zg lfi wlli...gsv fmslob ivorxgh lu gsrh Xlmqfmxgrlm? ...gsv gilooh...gsv xlikhv vzgvih...gsv dvivdloevh? Wrw dv izrhv lfi hdliwh ztzrmhg gsvn? Li szev dv ozrw gsrh yfiwvm lm lgsvih? Lm hl-xzoovw drgxsvih? Hgizb xsrowivm gzftsg gsv dzbh lu ulfo hlixvib, ", "meta": {"hash_id": "0e9c5866b41c9e68a3c490df193a6662e4e89fddeab852562cf032fa102e0b50"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 13, "content": "gsvri ylwrvh nfgzgvw gsilfts yozhksvnlfh irgfzo. Hvmg gl urtsg nlmhgvih gslfts gsvb xlfow mlg wrhgrmtfrhs tllw uiln vero. Gsv uorxpvi lu sfnzmrgb olmt vcgrmtfrhsvw drgsrm gsvn. Bvh, gsvri mfnyvih szev wdrmwovw gsilfts gsv bvzih. Yfg z uvd hgroo ilzn lfi ozmwh, luuvirmt gsvri yollwb dlip uli xlrm. Gl gsrh wzb gsvb hsznv fh drgs gsvri evib vcrhgvmxv! Gsv Mligs yovvwh, uolttvw yb dzi. Gsv yzggovh ziv gsv tlwh' dsrk, xszhgrhvnvmg uli lfi hrmh! Zmw ovg fh mlg ulitvg gsv gviilih, gsv hxlfitvh uiln yvblmw lfi dliow! Gsv Drow Sfmg irwvh gsv hpb drgs vevib ufoo nllm! Gsv wzip izrwvih zywfxg lfi xsrowivm rmgl ozmwh fmpmldm! Hlnv hzb gsvb svizow z hvxlmw Xlmqfmxgrlm! Xzm dv xszig z xlfihv yzxp rmgl gsv ortsg? Droo dv urmw gsv hgivmtgs gl yzmrhs gsv nztvh uiln lfi prmtwlnh? Fmrgv zilfmw gsv dzings lu gsv Vgvimzo Uriv? Mrts rh gsv Grnv lu gsv Hdliw zmw gsv Zcv! Mlmv droo urtsg gsrh dzi rm lfi hgvzw! 
Mrts rh gsv Grnv lu Nzwmvhh zmw Wrhwzrm!\",\n )\n assert bool(res) is True\n\n", "meta": {"hash_id": "3f20ee1819451d2ab70a8d644bb2e25b219bfe05d1cf82d1e34cfc824720a664"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 14, "content": "\ndef test_caesar():\n res = decrypt(\n Config().library_default().complete_config(),\n \"Uryyb zl anzr vf orr naq V yvxr qbt naq nccyr naq gerr\",\n )\n assert res == answer_str\n\n\ndef test_decimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"72 101 108 108 111 32 109 121 32 110 97 109 101 32 105 115 32 98 101 101 32 97 110 100 32 73 32 108 105 107 101 32 100 111 103 32 97 110 100 32 97 112 112 108 101 32 97 110 100 32 116 114 101 101\",\n )\n assert res == answer_str\n\n\ndef test_dna():\n res = decrypt(\n Config().library_default().complete_config(),\n \"GAT AAT GCT ATT TCT ATT AAT ACT GAA CGT GAA TCT ACT ATT AAT GGT\",\n )\n assert res == \"DNAISINTERESTING\"\n\n\ndef test_dtmf():\n res = decrypt(\n Config().library_default().complete_config(),\n", "meta": {"hash_id": "d3afa2e2a048120a8ff1d44e08029ff850020d0bbb8dc1d57e7311e07c7c9589"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 15, "content": " \"1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 
1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1209-697 1336-941 1336-941 1336-941 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1209-697 1336-941 1336-941 1336-941 1209-697 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697 1336-941 1209-697 1209-697 1336-941 1336-941 1209-697 1336-941 1209-697\",", "meta": {"hash_id": "9613ea129232a5edb1513e129ec95d2f9e1527b6ac7b181f3d71d58a64ff72e5"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 16, "content": "\n )\n assert res == answer_str\n\n\ndef test_galactic():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᒷꖎꖎ𝙹 ᒲ|| リᔑᒲᒷ ╎ᓭ ʖᒷᒷ ᔑリ↸ i ꖎ╎ꖌᒷ ↸𝙹⊣ ᔑリ↸ ᔑ!¡!¡ꖎᒷ ᔑリ↸ ℸ ̣ ∷ᒷᒷ\",\n )\n assert res == answer_str.lower()\n\n\ndef test_galactic_Xproblem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"⍑ᔑꖎ╎⎓ᔑ ̇/, ̇/||ꖎ𝙹!¡⍑𝙹リᒷ, ᔑ ̇/ ᔑꖎ𝙹リᒷ ᔑリ↸ ̇/ᒷ∷𝙹 ̇/ ⎓∷𝙹ᒲ 𝙹 ̇/⎓𝙹∷↸\",\n )\n assert res == \"halifax, xylophone, a x alone and xerox from oxford\"\n\n\ndef test_gzip():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H4sIAAzul18A/yXJzQmAMBSEwVa+ckwZT7LIw80P6sXuA3ocZpM9aC89msibXSJ6peA8RR3Hx5jTfzyXtAAbQvCyNgAAAA==\",\n )\n assert res == answer_str\n\n", "meta": {"hash_id": "6cdd3ce4c4e36cc1dac31944ccf863b914115b7361b9e31cf1c597b9cca20500"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 17, "content": "\ndef test_hexadecimal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"48 65 6c 6c 6f 20 6d 79 20 6e 61 6d 65 20 69 73 20 62 65 65 20 61 6e 64 20 49 20 6c 69 6b 65 20 64 6f 67 20 61 6e 64 20 61 70 70 6c 65 20 61 6e 64 20 74 72 65 65\",\n )\n\n assert res == answer_str\n\n\ndef test_json_problem():\n res = decrypt(\n Config().library_default().complete_config(),\n \"0110100001100101011011000110110001101111\",\n )\n assert res != \"0110100001100101011011000110110001101111\"\n\n\ndef test_leetspeak():\n res = decrypt(\n Config().library_default().complete_config(),\n \"|-|3ll0 my n4m3 1s 833 4nd 1 l1k3 D06 4ND 4ppl3 4nd 7R33\",\n )\n assert res.lower() == answer_str.lower()\n\n", "meta": {"hash_id": "63befd229db5b9d262f2cf69d22b40e5f8f3f23bbf459e72b842d59dd27bfaab"}}, {"doc_uuid": 
"1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 18, "content": "\ndef test_morse_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \".... . .-.. .-.. ---/-- -.--/-. .- -- ./.. .../-... . ./.- -. -../../.-.. .. -.- ./-.. --- --./.- -. -../.- .--. .--. .-.. ./.- -. -../- .-. . .\",\n )\n assert res == answer_str.upper()\n\n\ndef test_multi_tap():\n res = decrypt(\n Config().library_default().complete_config(),\n \"44 33 555 555 666 0 6 999 0 66 2 6 33 0 444 7777 0 22 33 33 0 2 66 3 0 444 0 555 444 55 33 0 3 666 4 0 2 66 3 0 2 7 7 555 33 0 2 66 3 0 8 777 33 33\",\n )\n assert res == answer_str.upper()\n\n", "meta": {"hash_id": "4c103a60081c345b58832a782bd666d648eeca9e3c0215fb0defd9e210ddf29d"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 19, "content": "\ndef test_new_line_at_start_returns():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"\\npass\\n\")\n\n assert res == \"\\npass\\n\"\n\n\ndef test_new_line_strip_and_return():\n # Language Checker should return True by stripping new line\n # but the new line should be returned to the user as new lines are important\n res = decrypt(Config().library_default().complete_config(), \"pass\\n\")\n\n assert res == \"pass\\n\"\n\n\ndef test_octal():\n res = decrypt(\n Config().library_default().complete_config(),\n \"110 145 154 154 157 40 155 171 40 156 141 155 145 40 151 163 40 142 145 145 40 141 156 144 40 111 40 154 151 153 145 40 144 157 147 40 141 156 144 40 141 160 160 154 145 40 141 156 144 40 164 162 145 145\",\n )\n assert res == answer_str\n\n", "meta": {"hash_id": "cd052a53653437d356a4c4db122c3c7007cca052eb180cf88fd42252fcc384a2"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 20, "content": "\ndef test_plaintext():\n res = decrypt(Config().library_default().complete_config(), answer_str)\n assert res == answer_str\n\n\ndef test_quadgrams_messed_up_spacing():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\",\n )\n assert (\n res == \"H ello m y na m e is b ee an d I l ik e do g a n d ap pl e a nd tr e e\"\n )\n\n\ndef test_quadgrams_no_spaces():\n res = decrypt(\n Config().library_default().complete_config(),\n \"HellomynameisbeeandIlikedogandappleandtree\",\n )\n assert res == \"HellomynameisbeeandIlikedogandappleandtree\"\n\n", "meta": {"hash_id": "1ce11d5b5cebb2901613b2d18f30c0d0a0de188b60001f040faeaf152c3554aa"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 21, "content": "\ndef test_quadgrams_space_between_every_letter():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\",\n )\n assert (\n res\n == \"H e l l o m y n a m e i s b e e a n d I l i k e d o g a n d a p p l e a n d t r e e\"\n )\n\n\ndef test_reversed_text():\n res = decrypt(\n Config().library_default().complete_config(),\n \"eert dna elppa dna god ekil I dna eeb si eman ym olleH\",\n )\n assert res == answer_str\n\n", "meta": {"hash_id": "38ac79d6b134049d0acb0397198da8bdb7e20ef1f58b445c314255053d890598"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 22, "content": "\ndef test_rot47():\n res 
= decrypt(\n Config().library_default().complete_config(),\n \"$A9:?I @7 3=24< BF2CEK[ ;F586 >J G@H\",\n )\n assert res == \"Sphinx of black quartz, judge my vow\"\n\n\ndef test_soundex():\n res = decrypt(\n Config().library_default().complete_config(),\n \"H236 I200 I500 T000 P230\",\n )\n assert res.lower() == \"history is in the past\"\n\n\ndef test_tap_code():\n res = decrypt(\n Config().library_default().complete_config(),\n \"4,4 1,5 4,3 4,4 3,4 3,3 1,5 4,4 5,2 3,4 4,4 2,3 4,2 1,5 1,5\",\n )\n assert res == \"test one two three\".upper()\n\n\ndef test_url():\n res = decrypt(\n Config().library_default().complete_config(),\n \"https%3A%2F%2Fwww%2Egoogle%2Ecom%2Fsearch%3Fq%3Dciphey\",\n )\n assert res == \"https://www.google.com/search?q=ciphey\"\n\n", "meta": {"hash_id": "4c5aae565dad234d98ce097f3559532f47c96dbb4e2cec02fbc58ae8784f6cde"}}, {"doc_uuid": "1cb2aa7099194a80d66547995e291634a603cb89864add10e5fa54c0a6656c74", "index": 23, "content": "\ndef test_uuencode():\n res = decrypt(\n Config().library_default().complete_config(),\n 'begin 644 /dev/stdout\\nM2&5L;&\\\\@;7D@;F%M92!I MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}", "golden_doc_uuids": ["538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5"], "golden_chunk_uuids": [["538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", 0]], "golden_documents": [{"uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "content": "#include \"value_generators.h\"\n\n#include \n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\nstd::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. 
\"\n };\n}\n\nstd::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\nstd::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n result.reserve(vals.size());\n\n std::transform(vals.begin(), vals.end(), std::back_inserter(result), [scale_multiplier, rhs_value](const auto& value) {\n return value * scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\nstd::vector MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n", "meta": {"doctype": "codebase", "relative_path": "/ut/value_generators.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 9, "source": 
"codebases/jsonl"}, "chunks": [{"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 0, "content": "#include \"value_generators.h\"\n\n#include \n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\n", "meta": {"hash_id": "21dbec98d0e457f85954db54c051f3fbb82ad706834dbb5db263bcb4da0460c9"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 1, "content": "std::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n", "meta": {"hash_id": "fbe7635a080272f4a758a1b2b528a9e0c48c252638a0eec438aabb1e39328273"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 2, "content": " \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. 
\"\n };\n}\n\n", "meta": {"hash_id": "9677468010c35686cd4edc78754a98673a81d1cca25a290dd0128658983d60db"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 3, "content": "std::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n", "meta": {"hash_id": "3d8dcd2c9c15e0c34b53b5764b7b7d8667fcecb68d2142934efa059f7b98244e"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 4, "content": " // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n", "meta": {"hash_id": "ac8b300be3634dc9b24ff5956013365c274f10b6afe11de766918cc9172c12f5"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 5, "content": " // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\n", "meta": {"hash_id": "6a9edae3be72b820631010d77907468d2215a7734247dcf7437b47dfd36dd4db"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 6, "content": "std::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n result.reserve(vals.size());\n\n", "meta": {"hash_id": "c7102e8a222c983064b0a84c2aa7d8e66ba3cddb35562acdbfe97b585cbb3c13"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 7, "content": " std::transform(vals.begin(), vals.end(), std::back_inserter(result), 
[scale_multiplier, rhs_value](const auto& value) {\n return value * scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\n", "meta": {"hash_id": "1d9c4f92fe2f6de85aa0d7947bdcfd77417cf41e51a67d543f8ae65379cc9796"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 8, "content": "std::vector MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n", "meta": {"hash_id": "70380ebe2218612b4696181e01c5cfccc3098721f9213c2a76d45e8d2cc41d08"}}]}], "golden_chunks": [{"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 0, "content": "#include \"value_generators.h\"\n\n#include \n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\n", "meta": {"hash_id": "21dbec98d0e457f85954db54c051f3fbb82ad706834dbb5db263bcb4da0460c9"}}], "meta": {"doctype": "codebase", "relative_path": "/ut/value_generators.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "How does the MakeFixedStrings function work?", "answer": "The MakeFixedStrings function takes a size parameter string_size and returns a vector of strings. It first calls the MakeStrings function to get a vector of strings. Then, it resizes each string in the vector to the specified string_size by appending null characters ('\\0') to the end of each string using std::for_each and a lambda function. This ensures that all strings in the returned vector have a fixed length of string_size. 
Here's the relevant code:\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}", "golden_doc_uuids": ["538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5"], "golden_chunk_uuids": [["538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", 0]], "golden_documents": [{"uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "content": "#include \"value_generators.h\"\n\n#include \n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\nstd::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. 
\"\n };\n}\n\nstd::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\nstd::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n result.reserve(vals.size());\n\n std::transform(vals.begin(), vals.end(), std::back_inserter(result), [scale_multiplier, rhs_value](const auto& value) {\n return value * scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\nstd::vector MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n", "meta": {"doctype": "codebase", "relative_path": "/ut/value_generators.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 9, "source": 
"codebases/jsonl"}, "chunks": [{"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 0, "content": "#include \"value_generators.h\"\n\n#include \n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\n", "meta": {"hash_id": "21dbec98d0e457f85954db54c051f3fbb82ad706834dbb5db263bcb4da0460c9"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 1, "content": "std::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n", "meta": {"hash_id": "fbe7635a080272f4a758a1b2b528a9e0c48c252638a0eec438aabb1e39328273"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 2, "content": " \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. 
\"\n };\n}\n\n", "meta": {"hash_id": "9677468010c35686cd4edc78754a98673a81d1cca25a290dd0128658983d60db"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 3, "content": "std::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n", "meta": {"hash_id": "3d8dcd2c9c15e0c34b53b5764b7b7d8667fcecb68d2142934efa059f7b98244e"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 4, "content": " // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n", "meta": {"hash_id": "ac8b300be3634dc9b24ff5956013365c274f10b6afe11de766918cc9172c12f5"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 5, "content": " // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\n", "meta": {"hash_id": "6a9edae3be72b820631010d77907468d2215a7734247dcf7437b47dfd36dd4db"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 6, "content": "std::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n result.reserve(vals.size());\n\n", "meta": {"hash_id": "c7102e8a222c983064b0a84c2aa7d8e66ba3cddb35562acdbfe97b585cbb3c13"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 7, "content": " std::transform(vals.begin(), vals.end(), std::back_inserter(result), 
[scale_multiplier, rhs_value](const auto& value) {\n return value * scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\n", "meta": {"hash_id": "1d9c4f92fe2f6de85aa0d7947bdcfd77417cf41e51a67d543f8ae65379cc9796"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 8, "content": "std::vector MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n", "meta": {"hash_id": "70380ebe2218612b4696181e01c5cfccc3098721f9213c2a76d45e8d2cc41d08"}}]}], "golden_chunks": [{"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 0, "content": "#include \"value_generators.h\"\n\n#include \n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\n", "meta": {"hash_id": "21dbec98d0e457f85954db54c051f3fbb82ad706834dbb5db263bcb4da0460c9"}}], "meta": {"doctype": "codebase", "relative_path": "/ut/value_generators.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the long string in the MakeStrings function?", "answer": "The MakeStrings function returns a vector of strings. One of the strings in the returned vector is a long string that is repeated multiple times. The purpose of this long string is to test how the code handles long strings. The comment in the code states:\n\"long string to test how those are handled. Here goes more text. \"\nThis long string is repeated multiple times to create a sufficiently long string for testing purposes.", "golden_doc_uuids": ["538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5"], "golden_chunk_uuids": [["538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", 1]], "golden_documents": [{"uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "content": "#include \"value_generators.h\"\n\n#include \n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\nstd::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. 
Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n };\n}\n\nstd::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\nstd::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = 
static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n result.reserve(vals.size());\n\n std::transform(vals.begin(), vals.end(), std::back_inserter(result), [scale_multiplier, rhs_value](const auto& value) {\n return value * scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\nstd::vector MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n", "meta": {"doctype": "codebase", "relative_path": "/ut/value_generators.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 9, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 0, "content": "#include \"value_generators.h\"\n\n#include \n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\n", "meta": {"hash_id": "21dbec98d0e457f85954db54c051f3fbb82ad706834dbb5db263bcb4da0460c9"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 1, "content": "std::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n", "meta": {"hash_id": "fbe7635a080272f4a758a1b2b528a9e0c48c252638a0eec438aabb1e39328273"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 2, "content": " \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. 
\"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n };\n}\n\n", "meta": {"hash_id": "9677468010c35686cd4edc78754a98673a81d1cca25a290dd0128658983d60db"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 3, "content": "std::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n", "meta": {"hash_id": "3d8dcd2c9c15e0c34b53b5764b7b7d8667fcecb68d2142934efa059f7b98244e"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 4, "content": " // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n", "meta": {"hash_id": "ac8b300be3634dc9b24ff5956013365c274f10b6afe11de766918cc9172c12f5"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 5, "content": " // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\n", "meta": {"hash_id": "6a9edae3be72b820631010d77907468d2215a7734247dcf7437b47dfd36dd4db"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 6, "content": "std::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n 
result.reserve(vals.size());\n\n", "meta": {"hash_id": "c7102e8a222c983064b0a84c2aa7d8e66ba3cddb35562acdbfe97b585cbb3c13"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 7, "content": " std::transform(vals.begin(), vals.end(), std::back_inserter(result), [scale_multiplier, rhs_value](const auto& value) {\n return value * scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\n", "meta": {"hash_id": "1d9c4f92fe2f6de85aa0d7947bdcfd77417cf41e51a67d543f8ae65379cc9796"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 8, "content": "std::vector MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n", "meta": {"hash_id": "70380ebe2218612b4696181e01c5cfccc3098721f9213c2a76d45e8d2cc41d08"}}]}], "golden_chunks": [{"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 1, "content": "std::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n", "meta": {"hash_id": "fbe7635a080272f4a758a1b2b528a9e0c48c252638a0eec438aabb1e39328273"}}], "meta": {"doctype": "codebase", "relative_path": "/ut/value_generators.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "What UUID values are returned by the MakeUUIDs function?", "answer": "The MakeUUIDs function returns a vector of UUID values. 
It returns the following UUIDs:\n- UUID(0llu, 0llu)\n- UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu)\n- UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu)\n- UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\nThese UUIDs are hardcoded in the function.", "golden_doc_uuids": ["538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5"], "golden_chunk_uuids": [["538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", 3]], "golden_documents": [{"uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "content": "#include \"value_generators.h\"\n\n#include \n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\nstd::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. 
\"\n };\n}\n\nstd::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\nstd::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n result.reserve(vals.size());\n\n std::transform(vals.begin(), vals.end(), std::back_inserter(result), [scale_multiplier, rhs_value](const auto& value) {\n return value * scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\nstd::vector MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n", "meta": {"doctype": "codebase", "relative_path": "/ut/value_generators.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 9, "source": 
"codebases/jsonl"}, "chunks": [{"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 0, "content": "#include \"value_generators.h\"\n\n#include \n#include \n#include \n\nnamespace {\nusing namespace clickhouse;\n}\n\nstd::vector MakeNumbers() {\n return std::vector {1, 2, 3, 7, 11, 13, 17, 19, 23, 29, 31};\n}\n\nstd::vector MakeBools() {\n return std::vector {1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0};\n}\n\nstd::vector MakeFixedStrings(size_t string_size) {\n std::vector result = MakeStrings();\n\n std::for_each(result.begin(), result.end(), [string_size](auto& value) {\n value.resize(string_size, '\\0');\n });\n\n return result;\n}\n\n", "meta": {"hash_id": "21dbec98d0e457f85954db54c051f3fbb82ad706834dbb5db263bcb4da0460c9"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 1, "content": "std::vector MakeStrings() {\n return {\n \"a\", \"ab\", \"abc\", \"abcd\",\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n", "meta": {"hash_id": "fbe7635a080272f4a758a1b2b528a9e0c48c252638a0eec438aabb1e39328273"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 2, "content": " \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. \"\n \"long string to test how those are handled. Here goes more text. 
\"\n };\n}\n\n", "meta": {"hash_id": "9677468010c35686cd4edc78754a98673a81d1cca25a290dd0128658983d60db"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 3, "content": "std::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n", "meta": {"hash_id": "3d8dcd2c9c15e0c34b53b5764b7b7d8667fcecb68d2142934efa059f7b98244e"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 4, "content": " // Approximatelly +/- 200 years around epoch (and value of epoch itself)\n // with non zero seconds and sub-seconds.\n // Please note there are values outside of DateTime (32-bit) range that might\n // not have correct string representation in CH yet,\n // but still are supported as Int64 values.\n return GenerateVector(values_size,\n [seconds_multiplier, year] (size_t i )-> Int64 {\n return (i - 100) * year * 2 + (i * 10) * seconds_multiplier + i;\n });\n}\n\nstd::vector MakeDates32() {\n // in CH Date32 internally a UInt32 and stores a day number\n // ColumnDate expects values to be seconds, which is then\n // converted to day number internally, hence the `* 86400`.\n // 114634 * 86400 is 2282-11-10, last integer that fits into DateTime32 range\n // (max is 2283-11-11)\n std::vector result = MakeDates();\n\n", "meta": {"hash_id": "ac8b300be3634dc9b24ff5956013365c274f10b6afe11de766918cc9172c12f5"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 5, "content": " // add corresponding negative values, since pre-epoch date are supported too.\n const auto size = result.size();\n for (size_t i = 0; i < size; ++i) {\n result.push_back(result[i] * -1);\n }\n\n return result;\n}\n\nstd::vector MakeDateTimes() {\n // in CH DateTime internally a UInt32\n return {\n 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536,\n 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864,\n 134217728, 268435456, 536870912, 1073741824, 2147483648, 4294967296 - 1\n };\n}\n\n", "meta": {"hash_id": "6a9edae3be72b820631010d77907468d2215a7734247dcf7437b47dfd36dd4db"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 6, "content": "std::vector MakeInt128s() {\n return {\n absl::MakeInt128(0xffffffffffffffffll, 0xffffffffffffffffll), // -1\n absl::MakeInt128(0, 0xffffffffffffffffll), // 2^64\n absl::MakeInt128(0xffffffffffffffffll, 0),\n absl::MakeInt128(0x8000000000000000ll, 0),\n Int128(0)\n };\n}\n\nstd::vector MakeDecimals(size_t /*precision*/, size_t scale) {\n const auto scale_multiplier = static_cast(std::pow(10, scale));\n const long long int rhs_value = 12345678910;\n\n const std::vector vals {0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, 65536 - 1};\n\n std::vector result;\n result.reserve(vals.size());\n\n", "meta": {"hash_id": "c7102e8a222c983064b0a84c2aa7d8e66ba3cddb35562acdbfe97b585cbb3c13"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 7, "content": " std::transform(vals.begin(), vals.end(), std::back_inserter(result), 
[scale_multiplier, rhs_value](const auto& value) {\n return value * scale_multiplier + rhs_value % scale_multiplier;\n });\n\n return result;\n}\n\nstd::string FooBarGenerator(size_t i) {\n std::string result;\n if (i % 3 == 0)\n result += \"Foo\";\n if (i % 5 == 0)\n result += \"Bar\";\n if (result.empty())\n result = std::to_string(i);\n\n return result;\n}\n\n", "meta": {"hash_id": "1d9c4f92fe2f6de85aa0d7947bdcfd77417cf41e51a67d543f8ae65379cc9796"}}, {"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 8, "content": "std::vector MakeIPv4s() {\n return {\n MakeIPv4(0x12345678), // 255.255.255.255\n MakeIPv4(0x0100007f), // 127.0.0.1\n MakeIPv4(3585395774),\n MakeIPv4(0),\n MakeIPv4(0x12345678),\n };\n}\n\nstd::vector MakeIPv6s() {\n return {\n MakeIPv6(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15), // 1:203:405:607:809:a0b:c0d:e0f\n MakeIPv6(0, 0, 0, 0, 0, 1), // ::1\n MakeIPv6(0, 0, 0, 0, 0, 0), // ::\n MakeIPv6(0xff, 0xff, 204, 152, 189, 116), // ::ffff:204.152.189.116\n };\n}\n", "meta": {"hash_id": "70380ebe2218612b4696181e01c5cfccc3098721f9213c2a76d45e8d2cc41d08"}}]}], "golden_chunks": [{"doc_uuid": "538e985a1d85e0fc67ab55f40ee6dade761bf959d5e8f3daca45b722935ba6a5", "index": 3, "content": "std::vector MakeUUIDs() {\n return {\n UUID(0llu, 0llu),\n UUID(0xbb6a8c699ab2414cllu, 0x86697b7fd27f0825llu),\n UUID(0x84b9f24bc26b49c6llu, 0xa03b4ab723341951llu),\n UUID(0x3507213c178649f9llu, 0x9faf035d662f60aellu)\n };\n}\n\nstd::vector MakeDateTime64s(size_t scale, size_t values_size) {\n const auto seconds_multiplier = static_cast(std::pow(10, scale));\n const auto year = 86400ull * 365 * seconds_multiplier; // ~approx, but this doesn't matter here.\n\n", "meta": {"hash_id": "3d8dcd2c9c15e0c34b53b5764b7b7d8667fcecb68d2142934efa059f7b98244e"}}], "meta": {"doctype": "codebase", "relative_path": "/ut/value_generators.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "How can I append a column to a ColumnTuple?", "answer": "You can append a column to a ColumnTuple using the Append() function. It takes a ColumnRef as an argument and appends the content of the given column to the end of the current ColumnTuple. 
This is evident from the comment \"Appends content of given column to the end of current one.\"", "golden_doc_uuids": ["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787"], "golden_chunk_uuids": [["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", 0]], "golden_documents": [{"uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef 
Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\nprivate:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/tuple.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 0, "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "8c42e2a460d8c4e7ed88c45bf6c664bfaf9956f370886c64480e0410b5c6db6e"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 1, "content": " /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count 
of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n", "meta": {"hash_id": "54fe492ecc3292a99623a204101bf7dc029b1994d3a6f28f69c8f63c79d8f6a9"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 2, "content": "private:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n", "meta": {"hash_id": "7bd6201d4ec2981142a2de84084596db44ba5486da7111f3cc57a7ed145bc140"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 3, "content": " ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n", "meta": {"hash_id": "0d2142db8e881f54f9b9d51870a05a432a2c9d44d8be18c032da193f9c7388f5"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 4, "content": " /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n", "meta": {"hash_id": "65286fb86bd03d7857b0cb026399fbf7d0a77b11c0995acedfc58c1beb3029a6"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 5, "content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\n", "meta": {"hash_id": "4efb9eae954d345a4b1f1a3cfc012e1afffee3ecfe43fa82a0ecb5982be48f96"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 6, "content": "private:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else 
{\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n", "meta": {"hash_id": "85d871e957ab6ef3081183732e2744ecc5fa2ad1b2c6ccb352acc8d3b04b579d"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 7, "content": " template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n", "meta": {"hash_id": "2f9d4dd1a63c5939e26cf77df59f9a61a81c2feb5a8a14f848f399edc19f9327"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 8, "content": " template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n", "meta": {"hash_id": "182af85626a5a380d7e99d70850fbaee7acb5e7ea840bc0949cf2c1edc07b66d"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 9, "content": " template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", "meta": {"hash_id": "ab64b1006ac9c442009a8ec36c4f7d153a90fc92bbc469b28edb7be86034deff"}}]}], "golden_chunks": [{"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 0, "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "8c42e2a460d8c4e7ed88c45bf6c664bfaf9956f370886c64480e0410b5c6db6e"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/tuple.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "How do I load column data from an input stream into a ColumnTuple?", "answer": "You can load column data from an input stream into a ColumnTuple using the LoadBody() function. It takes an InputStream pointer and the number of rows as arguments. The function returns a boolean value indicating the success of the operation. 
This is based on the comment \"Loads column data from input stream.\"", "golden_doc_uuids": ["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787"], "golden_chunk_uuids": [["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", 1], ["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", 0]], "golden_documents": [{"uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return 
Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\nprivate:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/tuple.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 0, "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "8c42e2a460d8c4e7ed88c45bf6c664bfaf9956f370886c64480e0410b5c6db6e"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 1, "content": " /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data 
.\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n", "meta": {"hash_id": "54fe492ecc3292a99623a204101bf7dc029b1994d3a6f28f69c8f63c79d8f6a9"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 2, "content": "private:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n", "meta": {"hash_id": "7bd6201d4ec2981142a2de84084596db44ba5486da7111f3cc57a7ed145bc140"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 3, "content": " ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n", "meta": {"hash_id": "0d2142db8e881f54f9b9d51870a05a432a2c9d44d8be18c032da193f9c7388f5"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 4, "content": " /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n", "meta": {"hash_id": "65286fb86bd03d7857b0cb026399fbf7d0a77b11c0995acedfc58c1beb3029a6"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 5, "content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\n", "meta": {"hash_id": "4efb9eae954d345a4b1f1a3cfc012e1afffee3ecfe43fa82a0ecb5982be48f96"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 6, "content": "private:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n 
if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n", "meta": {"hash_id": "85d871e957ab6ef3081183732e2744ecc5fa2ad1b2c6ccb352acc8d3b04b579d"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 7, "content": " template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n", "meta": {"hash_id": "2f9d4dd1a63c5939e26cf77df59f9a61a81c2feb5a8a14f848f399edc19f9327"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 8, "content": " template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n", "meta": {"hash_id": "182af85626a5a380d7e99d70850fbaee7acb5e7ea840bc0949cf2c1edc07b66d"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 9, "content": " template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", "meta": {"hash_id": "ab64b1006ac9c442009a8ec36c4f7d153a90fc92bbc469b28edb7be86034deff"}}]}], "golden_chunks": [{"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 1, "content": " /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n", "meta": {"hash_id": "54fe492ecc3292a99623a204101bf7dc029b1994d3a6f28f69c8f63c79d8f6a9"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 0, "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n 
}\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "8c42e2a460d8c4e7ed88c45bf6c664bfaf9956f370886c64480e0410b5c6db6e"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/tuple.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "How can I clear the data in a ColumnTuple?", "answer": "You can clear the data in a ColumnTuple by calling the Clear() function. It is an overridden function from the base Column class, as indicated by the comment \"Clear column data.\"", "golden_doc_uuids": ["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787"], "golden_chunk_uuids": [["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", 1]], "golden_documents": [{"uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is 
only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\nprivate:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/tuple.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 0, "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": 
"8c42e2a460d8c4e7ed88c45bf6c664bfaf9956f370886c64480e0410b5c6db6e"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 1, "content": " /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n", "meta": {"hash_id": "54fe492ecc3292a99623a204101bf7dc029b1994d3a6f28f69c8f63c79d8f6a9"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 2, "content": "private:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n", "meta": {"hash_id": "7bd6201d4ec2981142a2de84084596db44ba5486da7111f3cc57a7ed145bc140"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 3, "content": " ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n", "meta": {"hash_id": "0d2142db8e881f54f9b9d51870a05a432a2c9d44d8be18c032da193f9c7388f5"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 4, "content": " /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n", "meta": {"hash_id": "65286fb86bd03d7857b0cb026399fbf7d0a77b11c0995acedfc58c1beb3029a6"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 5, "content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef 
CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\n", "meta": {"hash_id": "4efb9eae954d345a4b1f1a3cfc012e1afffee3ecfe43fa82a0ecb5982be48f96"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 6, "content": "private:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n", "meta": {"hash_id": "85d871e957ab6ef3081183732e2744ecc5fa2ad1b2c6ccb352acc8d3b04b579d"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 7, "content": " template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n", "meta": {"hash_id": "2f9d4dd1a63c5939e26cf77df59f9a61a81c2feb5a8a14f848f399edc19f9327"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 8, "content": " template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n", "meta": {"hash_id": "182af85626a5a380d7e99d70850fbaee7acb5e7ea840bc0949cf2c1edc07b66d"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 9, "content": " template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", "meta": {"hash_id": "ab64b1006ac9c442009a8ec36c4f7d153a90fc92bbc469b28edb7be86034deff"}}]}], "golden_chunks": [{"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 1, "content": " /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n", "meta": {"hash_id": 
"54fe492ecc3292a99623a204101bf7dc029b1994d3a6f28f69c8f63c79d8f6a9"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/tuple.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "How do I get the number of rows in a ColumnTuple?", "answer": "You can get the number of rows in a ColumnTuple by calling the Size() function. It returns the count of rows in the column, as mentioned in the comment \"Returns count of rows in the column.\"", "golden_doc_uuids": ["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787"], "golden_chunk_uuids": [["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", 1], ["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", 0]], "golden_documents": [{"uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such 
conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\nprivate:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/tuple.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 0, "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "8c42e2a460d8c4e7ed88c45bf6c664bfaf9956f370886c64480e0410b5c6db6e"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 1, 
"content": " /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n", "meta": {"hash_id": "54fe492ecc3292a99623a204101bf7dc029b1994d3a6f28f69c8f63c79d8f6a9"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 2, "content": "private:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n", "meta": {"hash_id": "7bd6201d4ec2981142a2de84084596db44ba5486da7111f3cc57a7ed145bc140"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 3, "content": " ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n", "meta": {"hash_id": "0d2142db8e881f54f9b9d51870a05a432a2c9d44d8be18c032da193f9c7388f5"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 4, "content": " /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n", "meta": {"hash_id": "65286fb86bd03d7857b0cb026399fbf7d0a77b11c0995acedfc58c1beb3029a6"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 5, "content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n 
typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\n", "meta": {"hash_id": "4efb9eae954d345a4b1f1a3cfc012e1afffee3ecfe43fa82a0ecb5982be48f96"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 6, "content": "private:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n", "meta": {"hash_id": "85d871e957ab6ef3081183732e2744ecc5fa2ad1b2c6ccb352acc8d3b04b579d"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 7, "content": " template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n", "meta": {"hash_id": "2f9d4dd1a63c5939e26cf77df59f9a61a81c2feb5a8a14f848f399edc19f9327"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 8, "content": " template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n", "meta": {"hash_id": "182af85626a5a380d7e99d70850fbaee7acb5e7ea840bc0949cf2c1edc07b66d"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 9, "content": " template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", "meta": {"hash_id": "ab64b1006ac9c442009a8ec36c4f7d153a90fc92bbc469b28edb7be86034deff"}}]}], "golden_chunks": [{"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 1, "content": " /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n", "meta": {"hash_id": "54fe492ecc3292a99623a204101bf7dc029b1994d3a6f28f69c8f63c79d8f6a9"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 0, "content": "#pragma 
once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "8c42e2a460d8c4e7ed88c45bf6c664bfaf9956f370886c64480e0410b5c6db6e"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/tuple.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the ColumnTupleT class?", "answer": "The ColumnTupleT class is a templated subclass of ColumnTuple that provides additional functionality and type safety. It allows creating a ColumnTuple with specific column types, accessing values using the At() function or operator[], appending values using the Append() function with a tuple argument, and wrapping a ColumnTuple or Column into a ColumnTupleT.", "golden_doc_uuids": ["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787"], "golden_chunk_uuids": [["6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", 2]], "golden_documents": [{"uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include \n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n 
: ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\nprivate:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/tuple.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 0, "content": "#pragma once\n\n#include \"column.h\"\n#include \"utils.h\"\n\n#include 
\n\nnamespace clickhouse {\n\n/**\n * Represents column of Tuple([T]).\n */\nclass ColumnTuple : public Column {\npublic:\n ColumnTuple(const std::vector& columns);\n\n /// Returns count of columns in the tuple.\n size_t TupleSize() const;\n\n inline ColumnRef operator [] (size_t n) const {\n return columns_[n];\n }\n\n inline ColumnRef At(size_t n) const {\n return columns_[n];\n }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "8c42e2a460d8c4e7ed88c45bf6c664bfaf9956f370886c64480e0410b5c6db6e"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 1, "content": " /// Loads column prefix from input stream.\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t, size_t) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n", "meta": {"hash_id": "54fe492ecc3292a99623a204101bf7dc029b1994d3a6f28f69c8f63c79d8f6a9"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 2, "content": "private:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n", "meta": {"hash_id": "7bd6201d4ec2981142a2de84084596db44ba5486da7111f3cc57a7ed145bc140"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 3, "content": " ColumnTupleT(std::vector columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n ColumnTupleT(const std::initializer_list columns)\n : ColumnTuple(columns), typed_columns_(VectorToTuple(std::move(columns))) {}\n\n inline ValueType At(size_t index) const { return GetTupleOfValues(index); }\n\n inline ValueType operator[](size_t index) const { return GetTupleOfValues(index); }\n\n using ColumnTuple::Append;\n\n template \n inline void Append(std::tuple value) {\n AppendTuple(std::move(value));\n }\n\n", "meta": {"hash_id": "0d2142db8e881f54f9b9d51870a05a432a2c9d44d8be18c032da193f9c7388f5"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 4, "content": " /** Create a ColumnTupleT from a ColumnTuple, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnTuple&& col) {\n if (col.TupleSize() != std::tuple_size_v) {\n throw ValidationError(\"Can't wrap from \" + 
col.GetType().GetName());\n }\n return std::make_shared>(VectorToTuple(std::move(col)));\n }\n\n", "meta": {"hash_id": "65286fb86bd03d7857b0cb026399fbf7d0a77b11c0995acedfc58c1beb3029a6"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 5, "content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnTuple::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnTuple::CloneEmpty()); }\n\n void Swap(Column& other) override {\n auto& col = dynamic_cast&>(other);\n typed_columns_.swap(col.typed_columns_);\n ColumnTuple::Swap(other);\n }\n\n", "meta": {"hash_id": "4efb9eae954d345a4b1f1a3cfc012e1afffee3ecfe43fa82a0ecb5982be48f96"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 6, "content": "private:\n template >\n inline void AppendTuple([[maybe_unused]] T value) {\n static_assert(index <= std::tuple_size_v);\n static_assert(std::tuple_size_v == std::tuple_size_v);\n if constexpr (index == 0) {\n return;\n } else {\n std::get(typed_columns_)->Append(std::move(std::get(value)));\n AppendTuple(std::move(value));\n }\n }\n\n", "meta": {"hash_id": "85d871e957ab6ef3081183732e2744ecc5fa2ad1b2c6ccb352acc8d3b04b579d"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 7, "content": " template >\n inline static std::vector TupleToVector([[maybe_unused]] const T& value) {\n static_assert(index <= std::tuple_size_v);\n if constexpr (index == 0) {\n std::vector result;\n result.reserve(std::tuple_size_v);\n return result;\n } else {\n auto result = TupleToVector(value);\n result.push_back(std::get(value));\n return result;\n }\n }\n\n", "meta": {"hash_id": "2f9d4dd1a63c5939e26cf77df59f9a61a81c2feb5a8a14f848f399edc19f9327"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 8, "content": " template >\n inline static auto VectorToTuple([[maybe_unused]] T columns) {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n using ColumnType =\n typename std::tuple_element::type::element_type;\n auto column = WrapColumn(columns[column_index - 1]);\n return std::tuple_cat(std::move(VectorToTuple(std::move(columns))),\n std::make_tuple(std::move(column)));\n }\n }\n\n", "meta": {"hash_id": "182af85626a5a380d7e99d70850fbaee7acb5e7ea840bc0949cf2c1edc07b66d"}}, {"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 9, "content": " template >\n inline auto GetTupleOfValues([[maybe_unused]]size_t index) const {\n static_assert(column_index <= std::tuple_size_v);\n if constexpr (column_index == 0) {\n return std::make_tuple();\n } else {\n return std::tuple_cat(\n std::move(GetTupleOfValues(index)),\n std::move(std::make_tuple(std::get(typed_columns_)->At(index))));\n }\n }\n\n TupleOfColumns typed_columns_;\n};\n\n} // namespace clickhouse\n", "meta": {"hash_id": "ab64b1006ac9c442009a8ec36c4f7d153a90fc92bbc469b28edb7be86034deff"}}]}], "golden_chunks": [{"doc_uuid": "6ffb0cf236934c884639effc308d9cb67a7cd33d849153664bd70cc5b7dc6787", "index": 2, "content": "private:\n std::vector columns_;\n};\n\ntemplate \nclass ColumnTupleT : public ColumnTuple {\npublic:\n using 
TupleOfColumns = std::tuple...>;\n\n using ValueType = std::tuple().At(0))>...>;\n\n ColumnTupleT(std::tuple...> columns)\n : ColumnTuple(TupleToVector(columns)), typed_columns_(std::move(columns)) {}\n\n", "meta": {"hash_id": "7bd6201d4ec2981142a2de84084596db44ba5486da7111f3cc57a7ed145bc140"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/tuple.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "How can you append elements to a ColumnIPv4 instance?", "answer": "The ColumnIPv4 class provides several methods to append elements:\na. Append an IP address as a string: void Append(const std::string& ip);\nb. Append an IP address as a uint32_t value in host byte order: void Append(uint32_t ip);\nc. Append an IP address as an in_addr value: void Append(in_addr ip);", "golden_doc_uuids": ["68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c"], "golden_chunk_uuids": [["68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", 1]], "golden_documents": [{"uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/ip4.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 0, "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n 
*/\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n", "meta": {"hash_id": "7966077e8660ae1902d396440a53f6657754a52d7f5d583b2824659c6380767e"}}, {"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 1, "content": " /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "7218d670924fc9f6f0921918f333b12e7be386f959360cf6b8c39e04a30a5715"}}, {"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 2, "content": " /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"hash_id": "b46000c63fd71aa256374d07cf7dd572fb0a8ac27f6594a0f08defa751524137"}}]}], "golden_chunks": [{"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 1, "content": " /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "7218d670924fc9f6f0921918f333b12e7be386f959360cf6b8c39e04a30a5715"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/ip4.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How can you access elements from a ColumnIPv4 instance?", "answer": "You can access elements from a ColumnIPv4 instance using the following methods:\na. Get the element at a given row number: in_addr At(size_t n) const;\nb. Get the element at a given row number using the array operator: in_addr operator [] (size_t n) const;\nc. 
Get the element as a string at a given row number: std::string AsString(size_t n) const;", "golden_doc_uuids": ["68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c"], "golden_chunk_uuids": [["68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", 1]], "golden_documents": [{"uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/ip4.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 0, "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n", "meta": {"hash_id": "7966077e8660ae1902d396440a53f6657754a52d7f5d583b2824659c6380767e"}}, {"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 1, "content": " /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block 
insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "7218d670924fc9f6f0921918f333b12e7be386f959360cf6b8c39e04a30a5715"}}, {"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 2, "content": " /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"hash_id": "b46000c63fd71aa256374d07cf7dd572fb0a8ac27f6594a0f08defa751524137"}}]}], "golden_chunks": [{"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 1, "content": " /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "7218d670924fc9f6f0921918f333b12e7be386f959360cf6b8c39e04a30a5715"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/ip4.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How can you append the content of another column to a ColumnIPv4 instance?", "answer": "The void Append(ColumnRef column) method allows appending the content of another column to the end of the current ColumnIPv4 instance.", "golden_doc_uuids": ["68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c"], "golden_chunk_uuids": [["68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", 1]], "golden_documents": [{"uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends 
content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/ip4.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 0, "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n", "meta": {"hash_id": "7966077e8660ae1902d396440a53f6657754a52d7f5d583b2824659c6380767e"}}, {"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 1, "content": " /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "7218d670924fc9f6f0921918f333b12e7be386f959360cf6b8c39e04a30a5715"}}, {"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 2, "content": " /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"hash_id": "b46000c63fd71aa256374d07cf7dd572fb0a8ac27f6594a0f08defa751524137"}}]}], "golden_chunks": [{"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 1, "content": " /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row 
number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "7218d670924fc9f6f0921918f333b12e7be386f959360cf6b8c39e04a30a5715"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/ip4.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How can you get the number of rows in a ColumnIPv4 instance?", "answer": "The size_t Size() const method returns the count of rows (elements) in the column.", "golden_doc_uuids": ["68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c"], "golden_chunk_uuids": [["68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", 2]], "golden_documents": [{"uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/ip4.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 0, "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// 
Appends one element to the column.\n void Append(const std::string& ip);\n\n", "meta": {"hash_id": "7966077e8660ae1902d396440a53f6657754a52d7f5d583b2824659c6380767e"}}, {"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 1, "content": " /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "7218d670924fc9f6f0921918f333b12e7be386f959360cf6b8c39e04a30a5715"}}, {"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 2, "content": " /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"hash_id": "b46000c63fd71aa256374d07cf7dd572fb0a8ac27f6594a0f08defa751524137"}}]}], "golden_chunks": [{"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 2, "content": " /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"hash_id": "b46000c63fd71aa256374d07cf7dd572fb0a8ac27f6594a0f08defa751524137"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/ip4.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How can you create a slice of a ColumnIPv4 instance?", "answer": "The ColumnRef Slice(size_t begin, size_t len) const method allows creating a slice of the current column, starting from the specified begin position and containing len elements.", "golden_doc_uuids": ["68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c"], "golden_chunk_uuids": [["68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", 2]], "golden_documents": [{"uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed 
by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/ip4.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 0, "content": "#pragma once\n\n#include \"numeric.h\"\n\nstruct in_addr;\n\nnamespace clickhouse {\n\nclass ColumnIPv4 : public Column {\npublic:\n using DataType = in_addr;\n using ValueType = in_addr;\n\n ColumnIPv4();\n /** Takes ownership of the data, expects ColumnUInt32.\n * Modifying memory pointed by `data` from outside is UB.\n *\n * TODO: deprecate and remove as it is too dangerous and error-prone.\n */\n explicit ColumnIPv4(ColumnRef data);\n\n explicit ColumnIPv4(std::vector&& data);\n\n /// Appends one element to the column.\n void Append(const std::string& ip);\n\n", "meta": {"hash_id": "7966077e8660ae1902d396440a53f6657754a52d7f5d583b2824659c6380767e"}}, {"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 1, "content": " /// @params ip numeric value with host byte order.\n void Append(uint32_t ip);\n\n ///\n void Append(in_addr ip);\n\n /// Returns element at given row number.\n in_addr At(size_t n) const;\n\n /// Returns element at given row number.\n in_addr operator [] (size_t n) const;\n\n std::string AsString(size_t n) const;\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n", "meta": {"hash_id": "7218d670924fc9f6f0921918f333b12e7be386f959360cf6b8c39e04a30a5715"}}, {"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 2, "content": " /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const 
override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"hash_id": "b46000c63fd71aa256374d07cf7dd572fb0a8ac27f6594a0f08defa751524137"}}]}], "golden_chunks": [{"doc_uuid": "68689866af92461ae59fa9f0bc3064ba012e1482bce9a48fc44e1b25a26ac50c", "index": 2, "content": " /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\n ItemView GetItem(size_t index) const override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n}\n", "meta": {"hash_id": "b46000c63fd71aa256374d07cf7dd572fb0a8ac27f6594a0f08defa751524137"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/ip4.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What does the GetTypeMeta() function do?", "answer": "The GetTypeMeta() function takes a type name as input and returns the corresponding TypeAst::Meta enum value. It handles special type names such as \"Array\", \"Null\", \"Nullable\", \"Tuple\", \"Enum8\", \"Enum16\", \"LowCardinality\", \"SimpleAggregateFunction\", and \"Map\". For example:\n\nif (name == \"Array\") {\n return TypeAst::Array;\n}\n\nIf the input type name doesn't match any of the special cases, it returns TypeAst::Terminal.", "golden_doc_uuids": ["8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69"], "golden_chunk_uuids": [["8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", 3]], "golden_documents": [{"uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "content": "#include \"type_parser.h\"\n\n#include \"clickhouse/exceptions.h\"\n#include \"clickhouse/base/platform.h\" // for _win_\n\n#include \n#include \n#include \n#include \n#include \n\n#if defined _win_\n#include \n#else\n#include \n#endif\n\n\nnamespace clickhouse {\n\nbool TypeAst::operator==(const TypeAst & other) const {\n return meta == other.meta\n && code == other.code\n && name == other.name\n && value == other.value\n && std::equal(elements.begin(), elements.end(), other.elements.begin(), other.elements.end());\n}\n\nstatic const std::unordered_map kTypeCode = {\n { \"Void\", Type::Void },\n { \"Int8\", Type::Int8 },\n { \"Int16\", Type::Int16 },\n { \"Int32\", Type::Int32 },\n { \"Int64\", Type::Int64 },\n { \"Bool\", Type::UInt8 },\n { \"UInt8\", Type::UInt8 },\n { \"UInt16\", Type::UInt16 },\n { \"UInt32\", Type::UInt32 },\n { \"UInt64\", Type::UInt64 },\n { \"Float32\", Type::Float32 },\n { \"Float64\", Type::Float64 },\n { \"String\", Type::String },\n { \"FixedString\", Type::FixedString },\n { \"DateTime\", Type::DateTime },\n { \"DateTime64\", Type::DateTime64 },\n { \"Date\", Type::Date },\n { \"Date32\", Type::Date32 },\n { \"Array\", Type::Array },\n { \"Nullable\", Type::Nullable },\n { \"Tuple\", Type::Tuple },\n { \"Enum8\", Type::Enum8 },\n { \"Enum16\", Type::Enum16 },\n { \"UUID\", Type::UUID },\n { \"IPv4\", Type::IPv4 },\n { 
\"IPv6\", Type::IPv6 },\n { \"Int128\", Type::Int128 },\n// { \"UInt128\", Type::UInt128 },\n { \"Decimal\", Type::Decimal },\n { \"Decimal32\", Type::Decimal32 },\n { \"Decimal64\", Type::Decimal64 },\n { \"Decimal128\", Type::Decimal128 },\n { \"LowCardinality\", Type::LowCardinality },\n { \"Map\", Type::Map },\n { \"Point\", Type::Point },\n { \"Ring\", Type::Ring },\n { \"Polygon\", Type::Polygon },\n { \"MultiPolygon\", Type::MultiPolygon },\n};\n\ntemplate \ninline int CompateStringsCaseInsensitive(const L& left, const R& right) {\n int64_t size_diff = left.size() - right.size();\n if (size_diff != 0)\n return size_diff > 0 ? 1 : -1;\n\n#if defined _win_\n return _strnicmp(left.data(), right.data(), left.size());\n#else\n return strncasecmp(left.data(), right.data(), left.size());\n#endif\n}\n\nstatic Type::Code GetTypeCode(const std::string& name) {\n auto it = kTypeCode.find(name);\n if (it != kTypeCode.end()) {\n return it->second;\n }\n\n return Type::Void;\n}\n\nstatic TypeAst::Meta GetTypeMeta(const StringView& name) {\n if (name == \"Array\") {\n return TypeAst::Array;\n }\n\n if (name == \"Null\") {\n return TypeAst::Null;\n }\n\n if (name == \"Nullable\") {\n return TypeAst::Nullable;\n }\n\n if (name == \"Tuple\") {\n return TypeAst::Tuple;\n }\n\n if (name == \"Enum8\" || name == \"Enum16\") {\n return TypeAst::Enum;\n }\n\n if (name == \"LowCardinality\") {\n return TypeAst::LowCardinality;\n }\n\n if (name == \"SimpleAggregateFunction\") {\n return TypeAst::SimpleAggregateFunction;\n }\n\n if (name == \"Map\") {\n return TypeAst::Map;\n }\n\n return TypeAst::Terminal;\n}\n\nbool ValidateAST(const TypeAst& ast) {\n // Void terminal that is not actually \"void\" produced when unknown type is encountered.\n if (ast.meta == TypeAst::Terminal\n && ast.code == Type::Void\n && CompateStringsCaseInsensitive(ast.name, std::string_view(\"void\")) != 0)\n //throw UnimplementedError(\"Unsupported type: \" + ast.name);\n return false;\n\n return true;\n}\n\n\nTypeParser::TypeParser(const StringView& name)\n : cur_(name.data())\n , end_(name.data() + name.size())\n , type_(nullptr)\n{\n}\n\nTypeParser::~TypeParser() = default;\n\nbool TypeParser::Parse(TypeAst* type) {\n type_ = type;\n open_elements_.push(type_);\n\n size_t processed_tokens = 0;\n do {\n const Token & token = NextToken();\n switch (token.type) {\n case Token::QuotedString:\n {\n type_->meta = TypeAst::Terminal;\n if (token.value.length() < 1)\n type_->value_string = {};\n else\n type_->value_string = token.value.substr(1, token.value.length() - 2).to_string();\n type_->code = Type::String;\n break;\n }\n case Token::Name:\n type_->meta = GetTypeMeta(token.value);\n type_->name = token.value.to_string();\n type_->code = GetTypeCode(type_->name);\n break;\n case Token::Number:\n type_->meta = TypeAst::Number;\n type_->value = std::stol(token.value.to_string());\n break;\n case Token::String:\n type_->meta = TypeAst::String;\n type_->value_string = std::string(token.value);\n break;\n case Token::LPar:\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::RPar:\n type_ = open_elements_.top();\n open_elements_.pop();\n break;\n case Token::Assign:\n case Token::Comma:\n type_ = open_elements_.top();\n open_elements_.pop();\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::EOS:\n {\n // Ubalanced braces, brackets, etc is an error.\n if (open_elements_.size() != 1)\n 
return false;\n\n // Empty input string, no tokens produced\n if (processed_tokens == 0)\n return false;\n\n return ValidateAST(*type);\n }\n case Token::Invalid:\n return false;\n }\n ++processed_tokens;\n } while (true);\n}\n\nTypeParser::Token TypeParser::NextToken() {\n for (; cur_ < end_; ++cur_) {\n switch (*cur_) {\n case ' ':\n case '\\n':\n case '\\t':\n case '\\0':\n continue;\n case '=':\n return Token{Token::Assign, StringView(cur_++, 1)};\n case '(':\n return Token{Token::LPar, StringView(cur_++, 1)};\n case ')':\n return Token{Token::RPar, StringView(cur_++, 1)};\n case ',':\n return Token{Token::Comma, StringView(cur_++, 1)};\n case '\\'':\n {\n const auto end_quote_length = 1;\n const StringView end_quote{cur_, end_quote_length};\n // Fast forward to the closing quote.\n const auto start = cur_++;\n for (; cur_ < end_ - end_quote_length; ++cur_) {\n // TODO (nemkov): handle escaping ?\n if (end_quote == StringView{cur_, end_quote_length}) {\n cur_ += end_quote_length;\n\n return Token{Token::QuotedString, StringView{start, cur_}};\n }\n }\n return Token{Token::QuotedString, StringView(cur_++, 1)};\n }\n\n default: {\n const char* st = cur_;\n\n if (*cur_ == '\\'') {\n for (st = ++cur_; cur_ < end_; ++cur_) {\n if (*cur_ == '\\'') {\n return Token{Token::String, StringView(st, cur_++ - st)};\n }\n }\n\n return Token{Token::Invalid, StringView()};\n }\n\n if (isalpha(*cur_) || *cur_ == '_') {\n for (; cur_ < end_; ++cur_) {\n if (!isalpha(*cur_) && !isdigit(*cur_) && *cur_ != '_') {\n break;\n }\n }\n\n return Token{Token::Name, StringView(st, cur_)};\n }\n\n if (isdigit(*cur_) || *cur_ == '-') {\n for (++cur_; cur_ < end_; ++cur_) {\n if (!isdigit(*cur_)) {\n break;\n }\n }\n\n return Token{Token::Number, StringView(st, cur_)};\n }\n\n return Token{Token::Invalid, StringView()};\n }\n }\n }\n\n return Token{Token::EOS, StringView()};\n}\n\n\nconst TypeAst* ParseTypeName(const std::string& type_name) {\n // Cache for type_name.\n // Usually we won't have too many type names in the cache, so do not try to\n // limit cache size.\n static std::map ast_cache;\n static std::mutex lock;\n\n std::lock_guard guard(lock);\n auto it = ast_cache.find(type_name);\n if (it != ast_cache.end()) {\n return &it->second;\n }\n\n auto& ast = ast_cache[type_name];\n if (TypeParser(type_name).Parse(&ast)) {\n return *\n }\n ast_cache.erase(type_name);\n return nullptr;\n}\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/types/type_parser.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 14, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 0, "content": "#include \"type_parser.h\"\n\n#include \"clickhouse/exceptions.h\"\n#include \"clickhouse/base/platform.h\" // for _win_\n\n#include \n#include \n#include \n#include \n#include \n\n#if defined _win_\n#include \n#else\n#include \n#endif\n\n\nnamespace clickhouse {\n\nbool TypeAst::operator==(const TypeAst & other) const {\n return meta == other.meta\n && code == other.code\n && name == other.name\n && value == other.value\n && std::equal(elements.begin(), elements.end(), other.elements.begin(), other.elements.end());\n}\n\n", "meta": {"hash_id": "4010ccada7eac26cd15530737da54dc1d72f46a119cd16a0c2ff1f9428407618"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 1, "content": "static const std::unordered_map kTypeCode = {\n { \"Void\", Type::Void },\n { \"Int8\", Type::Int8 },\n { \"Int16\", 
Type::Int16 },\n { \"Int32\", Type::Int32 },\n { \"Int64\", Type::Int64 },\n { \"Bool\", Type::UInt8 },\n { \"UInt8\", Type::UInt8 },\n { \"UInt16\", Type::UInt16 },\n { \"UInt32\", Type::UInt32 },\n { \"UInt64\", Type::UInt64 },\n { \"Float32\", Type::Float32 },\n { \"Float64\", Type::Float64 },\n { \"String\", Type::String },\n { \"FixedString\", Type::FixedString },\n { \"DateTime\", Type::DateTime },\n { \"DateTime64\", Type::DateTime64 },\n", "meta": {"hash_id": "33df7decc9113f3ee471ecbeaf0ea153ac42ea13483644faee03f9f641bd2a3d"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 2, "content": " { \"Date\", Type::Date },\n { \"Date32\", Type::Date32 },\n { \"Array\", Type::Array },\n { \"Nullable\", Type::Nullable },\n { \"Tuple\", Type::Tuple },\n { \"Enum8\", Type::Enum8 },\n { \"Enum16\", Type::Enum16 },\n { \"UUID\", Type::UUID },\n { \"IPv4\", Type::IPv4 },\n { \"IPv6\", Type::IPv6 },\n { \"Int128\", Type::Int128 },\n// { \"UInt128\", Type::UInt128 },\n { \"Decimal\", Type::Decimal },\n { \"Decimal32\", Type::Decimal32 },\n { \"Decimal64\", Type::Decimal64 },\n { \"Decimal128\", Type::Decimal128 },\n { \"LowCardinality\", Type::LowCardinality },\n { \"Map\", Type::Map },\n { \"Point\", Type::Point },\n { \"Ring\", Type::Ring },\n { \"Polygon\", Type::Polygon },\n { \"MultiPolygon\", Type::MultiPolygon },\n};\n\n", "meta": {"hash_id": "e8197f34e5e2eb317b6a3d810b2ba1afcb5df8e960df0271edd77aef7a3d0c8f"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 3, "content": "template \ninline int CompateStringsCaseInsensitive(const L& left, const R& right) {\n int64_t size_diff = left.size() - right.size();\n if (size_diff != 0)\n return size_diff > 0 ? 1 : -1;\n\n#if defined _win_\n return _strnicmp(left.data(), right.data(), left.size());\n#else\n return strncasecmp(left.data(), right.data(), left.size());\n#endif\n}\n\nstatic Type::Code GetTypeCode(const std::string& name) {\n auto it = kTypeCode.find(name);\n if (it != kTypeCode.end()) {\n return it->second;\n }\n\n return Type::Void;\n}\n\nstatic TypeAst::Meta GetTypeMeta(const StringView& name) {\n if (name == \"Array\") {\n return TypeAst::Array;\n }\n\n", "meta": {"hash_id": "1692409c14d9a59a83bf7d23ea1cb3c3d3419942cb43c7bc3082703534042c26"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 4, "content": " if (name == \"Null\") {\n return TypeAst::Null;\n }\n\n if (name == \"Nullable\") {\n return TypeAst::Nullable;\n }\n\n if (name == \"Tuple\") {\n return TypeAst::Tuple;\n }\n\n if (name == \"Enum8\" || name == \"Enum16\") {\n return TypeAst::Enum;\n }\n\n if (name == \"LowCardinality\") {\n return TypeAst::LowCardinality;\n }\n\n if (name == \"SimpleAggregateFunction\") {\n return TypeAst::SimpleAggregateFunction;\n }\n\n if (name == \"Map\") {\n return TypeAst::Map;\n }\n\n return TypeAst::Terminal;\n}\n\n", "meta": {"hash_id": "b6f3095791bc44adfe77186689044796c8ab39d1ceb53175f14c155a7638bb10"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 5, "content": "bool ValidateAST(const TypeAst& ast) {\n // Void terminal that is not actually \"void\" produced when unknown type is encountered.\n if (ast.meta == TypeAst::Terminal\n && ast.code == Type::Void\n && CompateStringsCaseInsensitive(ast.name, std::string_view(\"void\")) != 0)\n //throw UnimplementedError(\"Unsupported type: \" + ast.name);\n return false;\n\n return true;\n}\n\n\nTypeParser::TypeParser(const 
StringView& name)\n : cur_(name.data())\n , end_(name.data() + name.size())\n , type_(nullptr)\n{\n}\n\nTypeParser::~TypeParser() = default;\n\nbool TypeParser::Parse(TypeAst* type) {\n type_ = type;\n open_elements_.push(type_);\n\n", "meta": {"hash_id": "ad1f48bd7a87f7c00132c97da10bd0cf61106484509000b3290a1858cc9583d4"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 6, "content": " size_t processed_tokens = 0;\n do {\n const Token & token = NextToken();\n switch (token.type) {\n case Token::QuotedString:\n {\n type_->meta = TypeAst::Terminal;\n if (token.value.length() < 1)\n type_->value_string = {};\n else\n type_->value_string = token.value.substr(1, token.value.length() - 2).to_string();\n", "meta": {"hash_id": "41e0bae907565d10a49f1bc77fc56afa79820400288763166ea3958d617a7b85"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 7, "content": " type_->code = Type::String;\n break;\n }\n case Token::Name:\n type_->meta = GetTypeMeta(token.value);\n type_->name = token.value.to_string();\n type_->code = GetTypeCode(type_->name);\n break;\n case Token::Number:\n type_->meta = TypeAst::Number;\n type_->value = std::stol(token.value.to_string());\n break;\n case Token::String:\n type_->meta = TypeAst::String;\n type_->value_string = std::string(token.value);\n", "meta": {"hash_id": "649472394d65eb741c09bb08beda544bb6dbb4bf2fdb9d58e89921370b57602f"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 8, "content": " break;\n case Token::LPar:\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::RPar:\n type_ = open_elements_.top();\n open_elements_.pop();\n break;\n case Token::Assign:\n case Token::Comma:\n type_ = open_elements_.top();\n open_elements_.pop();\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::EOS:\n {\n // Ubalanced braces, brackets, etc is an error.\n if (open_elements_.size() != 1)\n return false;\n\n", "meta": {"hash_id": "3925cac139c7ea36c7598d956e86313ed7173934528725f4e33d3e89be663959"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 9, "content": " // Empty input string, no tokens produced\n if (processed_tokens == 0)\n return false;\n\n return ValidateAST(*type);\n }\n case Token::Invalid:\n return false;\n }\n ++processed_tokens;\n } while (true);\n}\n\nTypeParser::Token TypeParser::NextToken() {\n for (; cur_ < end_; ++cur_) {\n switch (*cur_) {\n case ' ':\n case '\\n':\n case '\\t':\n case '\\0':\n continue;\n case '=':\n return Token{Token::Assign, StringView(cur_++, 1)};\n case '(':\n return Token{Token::LPar, StringView(cur_++, 1)};\n case ')':\n return Token{Token::RPar, StringView(cur_++, 1)};\n", "meta": {"hash_id": "25de7fc5aa147adf519d6bbaba8565a1e4b4a476b87be31dc98dee21de942834"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 10, "content": " case ',':\n return Token{Token::Comma, StringView(cur_++, 1)};\n case '\\'':\n {\n const auto end_quote_length = 1;\n const StringView end_quote{cur_, end_quote_length};\n // Fast forward to the closing quote.\n const auto start = cur_++;\n for (; cur_ < end_ - end_quote_length; ++cur_) {\n // TODO (nemkov): handle escaping ?\n if (end_quote == StringView{cur_, end_quote_length}) {\n cur_ += end_quote_length;\n\n", "meta": {"hash_id": 
"ef74516a17f133764cad36f716e077d307874165a81172da4796fcef0f1d24f0"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 11, "content": " return Token{Token::QuotedString, StringView{start, cur_}};\n }\n }\n return Token{Token::QuotedString, StringView(cur_++, 1)};\n }\n\n default: {\n const char* st = cur_;\n\n if (*cur_ == '\\'') {\n for (st = ++cur_; cur_ < end_; ++cur_) {\n if (*cur_ == '\\'') {\n return Token{Token::String, StringView(st, cur_++ - st)};\n }\n }\n\n", "meta": {"hash_id": "77f02f81fbc529b64493ba59df6a5305241848619c4d52992d16b1a2e5a7a896"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 12, "content": " return Token{Token::Invalid, StringView()};\n }\n\n if (isalpha(*cur_) || *cur_ == '_') {\n for (; cur_ < end_; ++cur_) {\n if (!isalpha(*cur_) && !isdigit(*cur_) && *cur_ != '_') {\n break;\n }\n }\n\n return Token{Token::Name, StringView(st, cur_)};\n }\n\n if (isdigit(*cur_) || *cur_ == '-') {\n for (++cur_; cur_ < end_; ++cur_) {\n if (!isdigit(*cur_)) {\n break;\n }\n }\n\n", "meta": {"hash_id": "e0f49fe7ad4d886ef3a9b520cd2d53e73b71782bd32cb9d87526d780f024d39f"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 13, "content": " return Token{Token::Number, StringView(st, cur_)};\n }\n\n return Token{Token::Invalid, StringView()};\n }\n }\n }\n\n return Token{Token::EOS, StringView()};\n}\n\n\nconst TypeAst* ParseTypeName(const std::string& type_name) {\n // Cache for type_name.\n // Usually we won't have too many type names in the cache, so do not try to\n // limit cache size.\n static std::map ast_cache;\n static std::mutex lock;\n\n std::lock_guard guard(lock);\n auto it = ast_cache.find(type_name);\n if (it != ast_cache.end()) {\n return &it->second;\n }\n\n auto& ast = ast_cache[type_name];\n if (TypeParser(type_name).Parse(&ast)) {\n return *\n }\n ast_cache.erase(type_name);\n return nullptr;\n}\n\n}\n", "meta": {"hash_id": "3646c0d73dd1837525da158bc9a513e52e8f341bbd45fa79a7814706e2ab91eb"}}]}], "golden_chunks": [{"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 3, "content": "template \ninline int CompateStringsCaseInsensitive(const L& left, const R& right) {\n int64_t size_diff = left.size() - right.size();\n if (size_diff != 0)\n return size_diff > 0 ? 1 : -1;\n\n#if defined _win_\n return _strnicmp(left.data(), right.data(), left.size());\n#else\n return strncasecmp(left.data(), right.data(), left.size());\n#endif\n}\n\nstatic Type::Code GetTypeCode(const std::string& name) {\n auto it = kTypeCode.find(name);\n if (it != kTypeCode.end()) {\n return it->second;\n }\n\n return Type::Void;\n}\n\nstatic TypeAst::Meta GetTypeMeta(const StringView& name) {\n if (name == \"Array\") {\n return TypeAst::Array;\n }\n\n", "meta": {"hash_id": "1692409c14d9a59a83bf7d23ea1cb3c3d3419942cb43c7bc3082703534042c26"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/types/type_parser.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 14, "source": "codebases/jsonl"}} +{"query": "How does the CompateStringsCaseInsensitive() function compare two strings case-insensitively?", "answer": "The CompateStringsCaseInsensitive() function compares two strings case-insensitively by first checking their size difference. If the sizes are different, it returns 1 if the left string is longer, or -1 if the right string is longer. 
If the sizes are equal, it uses platform-specific functions to perform a case-insensitive comparison:\n\n#if defined _win_\n return _strnicmp(left.data(), right.data(), left.size());\n#else\n return strncasecmp(left.data(), right.data(), left.size());\n#endif\n\nOn Windows, it uses _strnicmp(), while on other platforms, it uses strncasecmp().", "golden_doc_uuids": ["8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69"], "golden_chunk_uuids": [["8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", 3]], "golden_documents": [{"uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "content": "#include \"type_parser.h\"\n\n#include \"clickhouse/exceptions.h\"\n#include \"clickhouse/base/platform.h\" // for _win_\n\n#include \n#include \n#include \n#include \n#include \n\n#if defined _win_\n#include \n#else\n#include \n#endif\n\n\nnamespace clickhouse {\n\nbool TypeAst::operator==(const TypeAst & other) const {\n return meta == other.meta\n && code == other.code\n && name == other.name\n && value == other.value\n && std::equal(elements.begin(), elements.end(), other.elements.begin(), other.elements.end());\n}\n\nstatic const std::unordered_map kTypeCode = {\n { \"Void\", Type::Void },\n { \"Int8\", Type::Int8 },\n { \"Int16\", Type::Int16 },\n { \"Int32\", Type::Int32 },\n { \"Int64\", Type::Int64 },\n { \"Bool\", Type::UInt8 },\n { \"UInt8\", Type::UInt8 },\n { \"UInt16\", Type::UInt16 },\n { \"UInt32\", Type::UInt32 },\n { \"UInt64\", Type::UInt64 },\n { \"Float32\", Type::Float32 },\n { \"Float64\", Type::Float64 },\n { \"String\", Type::String },\n { \"FixedString\", Type::FixedString },\n { \"DateTime\", Type::DateTime },\n { \"DateTime64\", Type::DateTime64 },\n { \"Date\", Type::Date },\n { \"Date32\", Type::Date32 },\n { \"Array\", Type::Array },\n { \"Nullable\", Type::Nullable },\n { \"Tuple\", Type::Tuple },\n { \"Enum8\", Type::Enum8 },\n { \"Enum16\", Type::Enum16 },\n { \"UUID\", Type::UUID },\n { \"IPv4\", Type::IPv4 },\n { \"IPv6\", Type::IPv6 },\n { \"Int128\", Type::Int128 },\n// { \"UInt128\", Type::UInt128 },\n { \"Decimal\", Type::Decimal },\n { \"Decimal32\", Type::Decimal32 },\n { \"Decimal64\", Type::Decimal64 },\n { \"Decimal128\", Type::Decimal128 },\n { \"LowCardinality\", Type::LowCardinality },\n { \"Map\", Type::Map },\n { \"Point\", Type::Point },\n { \"Ring\", Type::Ring },\n { \"Polygon\", Type::Polygon },\n { \"MultiPolygon\", Type::MultiPolygon },\n};\n\ntemplate \ninline int CompateStringsCaseInsensitive(const L& left, const R& right) {\n int64_t size_diff = left.size() - right.size();\n if (size_diff != 0)\n return size_diff > 0 ? 
1 : -1;\n\n#if defined _win_\n return _strnicmp(left.data(), right.data(), left.size());\n#else\n return strncasecmp(left.data(), right.data(), left.size());\n#endif\n}\n\nstatic Type::Code GetTypeCode(const std::string& name) {\n auto it = kTypeCode.find(name);\n if (it != kTypeCode.end()) {\n return it->second;\n }\n\n return Type::Void;\n}\n\nstatic TypeAst::Meta GetTypeMeta(const StringView& name) {\n if (name == \"Array\") {\n return TypeAst::Array;\n }\n\n if (name == \"Null\") {\n return TypeAst::Null;\n }\n\n if (name == \"Nullable\") {\n return TypeAst::Nullable;\n }\n\n if (name == \"Tuple\") {\n return TypeAst::Tuple;\n }\n\n if (name == \"Enum8\" || name == \"Enum16\") {\n return TypeAst::Enum;\n }\n\n if (name == \"LowCardinality\") {\n return TypeAst::LowCardinality;\n }\n\n if (name == \"SimpleAggregateFunction\") {\n return TypeAst::SimpleAggregateFunction;\n }\n\n if (name == \"Map\") {\n return TypeAst::Map;\n }\n\n return TypeAst::Terminal;\n}\n\nbool ValidateAST(const TypeAst& ast) {\n // Void terminal that is not actually \"void\" produced when unknown type is encountered.\n if (ast.meta == TypeAst::Terminal\n && ast.code == Type::Void\n && CompateStringsCaseInsensitive(ast.name, std::string_view(\"void\")) != 0)\n //throw UnimplementedError(\"Unsupported type: \" + ast.name);\n return false;\n\n return true;\n}\n\n\nTypeParser::TypeParser(const StringView& name)\n : cur_(name.data())\n , end_(name.data() + name.size())\n , type_(nullptr)\n{\n}\n\nTypeParser::~TypeParser() = default;\n\nbool TypeParser::Parse(TypeAst* type) {\n type_ = type;\n open_elements_.push(type_);\n\n size_t processed_tokens = 0;\n do {\n const Token & token = NextToken();\n switch (token.type) {\n case Token::QuotedString:\n {\n type_->meta = TypeAst::Terminal;\n if (token.value.length() < 1)\n type_->value_string = {};\n else\n type_->value_string = token.value.substr(1, token.value.length() - 2).to_string();\n type_->code = Type::String;\n break;\n }\n case Token::Name:\n type_->meta = GetTypeMeta(token.value);\n type_->name = token.value.to_string();\n type_->code = GetTypeCode(type_->name);\n break;\n case Token::Number:\n type_->meta = TypeAst::Number;\n type_->value = std::stol(token.value.to_string());\n break;\n case Token::String:\n type_->meta = TypeAst::String;\n type_->value_string = std::string(token.value);\n break;\n case Token::LPar:\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::RPar:\n type_ = open_elements_.top();\n open_elements_.pop();\n break;\n case Token::Assign:\n case Token::Comma:\n type_ = open_elements_.top();\n open_elements_.pop();\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::EOS:\n {\n // Ubalanced braces, brackets, etc is an error.\n if (open_elements_.size() != 1)\n return false;\n\n // Empty input string, no tokens produced\n if (processed_tokens == 0)\n return false;\n\n return ValidateAST(*type);\n }\n case Token::Invalid:\n return false;\n }\n ++processed_tokens;\n } while (true);\n}\n\nTypeParser::Token TypeParser::NextToken() {\n for (; cur_ < end_; ++cur_) {\n switch (*cur_) {\n case ' ':\n case '\\n':\n case '\\t':\n case '\\0':\n continue;\n case '=':\n return Token{Token::Assign, StringView(cur_++, 1)};\n case '(':\n return Token{Token::LPar, StringView(cur_++, 1)};\n case ')':\n return Token{Token::RPar, StringView(cur_++, 1)};\n case ',':\n return Token{Token::Comma, 
StringView(cur_++, 1)};\n case '\\'':\n {\n const auto end_quote_length = 1;\n const StringView end_quote{cur_, end_quote_length};\n // Fast forward to the closing quote.\n const auto start = cur_++;\n for (; cur_ < end_ - end_quote_length; ++cur_) {\n // TODO (nemkov): handle escaping ?\n if (end_quote == StringView{cur_, end_quote_length}) {\n cur_ += end_quote_length;\n\n return Token{Token::QuotedString, StringView{start, cur_}};\n }\n }\n return Token{Token::QuotedString, StringView(cur_++, 1)};\n }\n\n default: {\n const char* st = cur_;\n\n if (*cur_ == '\\'') {\n for (st = ++cur_; cur_ < end_; ++cur_) {\n if (*cur_ == '\\'') {\n return Token{Token::String, StringView(st, cur_++ - st)};\n }\n }\n\n return Token{Token::Invalid, StringView()};\n }\n\n if (isalpha(*cur_) || *cur_ == '_') {\n for (; cur_ < end_; ++cur_) {\n if (!isalpha(*cur_) && !isdigit(*cur_) && *cur_ != '_') {\n break;\n }\n }\n\n return Token{Token::Name, StringView(st, cur_)};\n }\n\n if (isdigit(*cur_) || *cur_ == '-') {\n for (++cur_; cur_ < end_; ++cur_) {\n if (!isdigit(*cur_)) {\n break;\n }\n }\n\n return Token{Token::Number, StringView(st, cur_)};\n }\n\n return Token{Token::Invalid, StringView()};\n }\n }\n }\n\n return Token{Token::EOS, StringView()};\n}\n\n\nconst TypeAst* ParseTypeName(const std::string& type_name) {\n // Cache for type_name.\n // Usually we won't have too many type names in the cache, so do not try to\n // limit cache size.\n static std::map ast_cache;\n static std::mutex lock;\n\n std::lock_guard guard(lock);\n auto it = ast_cache.find(type_name);\n if (it != ast_cache.end()) {\n return &it->second;\n }\n\n auto& ast = ast_cache[type_name];\n if (TypeParser(type_name).Parse(&ast)) {\n return *\n }\n ast_cache.erase(type_name);\n return nullptr;\n}\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/types/type_parser.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 14, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 0, "content": "#include \"type_parser.h\"\n\n#include \"clickhouse/exceptions.h\"\n#include \"clickhouse/base/platform.h\" // for _win_\n\n#include \n#include \n#include \n#include \n#include \n\n#if defined _win_\n#include \n#else\n#include \n#endif\n\n\nnamespace clickhouse {\n\nbool TypeAst::operator==(const TypeAst & other) const {\n return meta == other.meta\n && code == other.code\n && name == other.name\n && value == other.value\n && std::equal(elements.begin(), elements.end(), other.elements.begin(), other.elements.end());\n}\n\n", "meta": {"hash_id": "4010ccada7eac26cd15530737da54dc1d72f46a119cd16a0c2ff1f9428407618"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 1, "content": "static const std::unordered_map kTypeCode = {\n { \"Void\", Type::Void },\n { \"Int8\", Type::Int8 },\n { \"Int16\", Type::Int16 },\n { \"Int32\", Type::Int32 },\n { \"Int64\", Type::Int64 },\n { \"Bool\", Type::UInt8 },\n { \"UInt8\", Type::UInt8 },\n { \"UInt16\", Type::UInt16 },\n { \"UInt32\", Type::UInt32 },\n { \"UInt64\", Type::UInt64 },\n { \"Float32\", Type::Float32 },\n { \"Float64\", Type::Float64 },\n { \"String\", Type::String },\n { \"FixedString\", Type::FixedString },\n { \"DateTime\", Type::DateTime },\n { \"DateTime64\", Type::DateTime64 },\n", "meta": {"hash_id": "33df7decc9113f3ee471ecbeaf0ea153ac42ea13483644faee03f9f641bd2a3d"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", 
"index": 2, "content": " { \"Date\", Type::Date },\n { \"Date32\", Type::Date32 },\n { \"Array\", Type::Array },\n { \"Nullable\", Type::Nullable },\n { \"Tuple\", Type::Tuple },\n { \"Enum8\", Type::Enum8 },\n { \"Enum16\", Type::Enum16 },\n { \"UUID\", Type::UUID },\n { \"IPv4\", Type::IPv4 },\n { \"IPv6\", Type::IPv6 },\n { \"Int128\", Type::Int128 },\n// { \"UInt128\", Type::UInt128 },\n { \"Decimal\", Type::Decimal },\n { \"Decimal32\", Type::Decimal32 },\n { \"Decimal64\", Type::Decimal64 },\n { \"Decimal128\", Type::Decimal128 },\n { \"LowCardinality\", Type::LowCardinality },\n { \"Map\", Type::Map },\n { \"Point\", Type::Point },\n { \"Ring\", Type::Ring },\n { \"Polygon\", Type::Polygon },\n { \"MultiPolygon\", Type::MultiPolygon },\n};\n\n", "meta": {"hash_id": "e8197f34e5e2eb317b6a3d810b2ba1afcb5df8e960df0271edd77aef7a3d0c8f"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 3, "content": "template \ninline int CompateStringsCaseInsensitive(const L& left, const R& right) {\n int64_t size_diff = left.size() - right.size();\n if (size_diff != 0)\n return size_diff > 0 ? 1 : -1;\n\n#if defined _win_\n return _strnicmp(left.data(), right.data(), left.size());\n#else\n return strncasecmp(left.data(), right.data(), left.size());\n#endif\n}\n\nstatic Type::Code GetTypeCode(const std::string& name) {\n auto it = kTypeCode.find(name);\n if (it != kTypeCode.end()) {\n return it->second;\n }\n\n return Type::Void;\n}\n\nstatic TypeAst::Meta GetTypeMeta(const StringView& name) {\n if (name == \"Array\") {\n return TypeAst::Array;\n }\n\n", "meta": {"hash_id": "1692409c14d9a59a83bf7d23ea1cb3c3d3419942cb43c7bc3082703534042c26"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 4, "content": " if (name == \"Null\") {\n return TypeAst::Null;\n }\n\n if (name == \"Nullable\") {\n return TypeAst::Nullable;\n }\n\n if (name == \"Tuple\") {\n return TypeAst::Tuple;\n }\n\n if (name == \"Enum8\" || name == \"Enum16\") {\n return TypeAst::Enum;\n }\n\n if (name == \"LowCardinality\") {\n return TypeAst::LowCardinality;\n }\n\n if (name == \"SimpleAggregateFunction\") {\n return TypeAst::SimpleAggregateFunction;\n }\n\n if (name == \"Map\") {\n return TypeAst::Map;\n }\n\n return TypeAst::Terminal;\n}\n\n", "meta": {"hash_id": "b6f3095791bc44adfe77186689044796c8ab39d1ceb53175f14c155a7638bb10"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 5, "content": "bool ValidateAST(const TypeAst& ast) {\n // Void terminal that is not actually \"void\" produced when unknown type is encountered.\n if (ast.meta == TypeAst::Terminal\n && ast.code == Type::Void\n && CompateStringsCaseInsensitive(ast.name, std::string_view(\"void\")) != 0)\n //throw UnimplementedError(\"Unsupported type: \" + ast.name);\n return false;\n\n return true;\n}\n\n\nTypeParser::TypeParser(const StringView& name)\n : cur_(name.data())\n , end_(name.data() + name.size())\n , type_(nullptr)\n{\n}\n\nTypeParser::~TypeParser() = default;\n\nbool TypeParser::Parse(TypeAst* type) {\n type_ = type;\n open_elements_.push(type_);\n\n", "meta": {"hash_id": "ad1f48bd7a87f7c00132c97da10bd0cf61106484509000b3290a1858cc9583d4"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 6, "content": " size_t processed_tokens = 0;\n do {\n const Token & token = NextToken();\n switch (token.type) {\n case Token::QuotedString:\n {\n type_->meta = TypeAst::Terminal;\n if (token.value.length() < 1)\n 
type_->value_string = {};\n else\n type_->value_string = token.value.substr(1, token.value.length() - 2).to_string();\n", "meta": {"hash_id": "41e0bae907565d10a49f1bc77fc56afa79820400288763166ea3958d617a7b85"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 7, "content": " type_->code = Type::String;\n break;\n }\n case Token::Name:\n type_->meta = GetTypeMeta(token.value);\n type_->name = token.value.to_string();\n type_->code = GetTypeCode(type_->name);\n break;\n case Token::Number:\n type_->meta = TypeAst::Number;\n type_->value = std::stol(token.value.to_string());\n break;\n case Token::String:\n type_->meta = TypeAst::String;\n type_->value_string = std::string(token.value);\n", "meta": {"hash_id": "649472394d65eb741c09bb08beda544bb6dbb4bf2fdb9d58e89921370b57602f"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 8, "content": " break;\n case Token::LPar:\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::RPar:\n type_ = open_elements_.top();\n open_elements_.pop();\n break;\n case Token::Assign:\n case Token::Comma:\n type_ = open_elements_.top();\n open_elements_.pop();\n type_->elements.emplace_back(TypeAst());\n open_elements_.push(type_);\n type_ = &type_->elements.back();\n break;\n case Token::EOS:\n {\n // Ubalanced braces, brackets, etc is an error.\n if (open_elements_.size() != 1)\n return false;\n\n", "meta": {"hash_id": "3925cac139c7ea36c7598d956e86313ed7173934528725f4e33d3e89be663959"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 9, "content": " // Empty input string, no tokens produced\n if (processed_tokens == 0)\n return false;\n\n return ValidateAST(*type);\n }\n case Token::Invalid:\n return false;\n }\n ++processed_tokens;\n } while (true);\n}\n\nTypeParser::Token TypeParser::NextToken() {\n for (; cur_ < end_; ++cur_) {\n switch (*cur_) {\n case ' ':\n case '\\n':\n case '\\t':\n case '\\0':\n continue;\n case '=':\n return Token{Token::Assign, StringView(cur_++, 1)};\n case '(':\n return Token{Token::LPar, StringView(cur_++, 1)};\n case ')':\n return Token{Token::RPar, StringView(cur_++, 1)};\n", "meta": {"hash_id": "25de7fc5aa147adf519d6bbaba8565a1e4b4a476b87be31dc98dee21de942834"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 10, "content": " case ',':\n return Token{Token::Comma, StringView(cur_++, 1)};\n case '\\'':\n {\n const auto end_quote_length = 1;\n const StringView end_quote{cur_, end_quote_length};\n // Fast forward to the closing quote.\n const auto start = cur_++;\n for (; cur_ < end_ - end_quote_length; ++cur_) {\n // TODO (nemkov): handle escaping ?\n if (end_quote == StringView{cur_, end_quote_length}) {\n cur_ += end_quote_length;\n\n", "meta": {"hash_id": "ef74516a17f133764cad36f716e077d307874165a81172da4796fcef0f1d24f0"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 11, "content": " return Token{Token::QuotedString, StringView{start, cur_}};\n }\n }\n return Token{Token::QuotedString, StringView(cur_++, 1)};\n }\n\n default: {\n const char* st = cur_;\n\n if (*cur_ == '\\'') {\n for (st = ++cur_; cur_ < end_; ++cur_) {\n if (*cur_ == '\\'') {\n return Token{Token::String, StringView(st, cur_++ - st)};\n }\n }\n\n", "meta": {"hash_id": "77f02f81fbc529b64493ba59df6a5305241848619c4d52992d16b1a2e5a7a896"}}, {"doc_uuid": 
"8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 12, "content": " return Token{Token::Invalid, StringView()};\n }\n\n if (isalpha(*cur_) || *cur_ == '_') {\n for (; cur_ < end_; ++cur_) {\n if (!isalpha(*cur_) && !isdigit(*cur_) && *cur_ != '_') {\n break;\n }\n }\n\n return Token{Token::Name, StringView(st, cur_)};\n }\n\n if (isdigit(*cur_) || *cur_ == '-') {\n for (++cur_; cur_ < end_; ++cur_) {\n if (!isdigit(*cur_)) {\n break;\n }\n }\n\n", "meta": {"hash_id": "e0f49fe7ad4d886ef3a9b520cd2d53e73b71782bd32cb9d87526d780f024d39f"}}, {"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 13, "content": " return Token{Token::Number, StringView(st, cur_)};\n }\n\n return Token{Token::Invalid, StringView()};\n }\n }\n }\n\n return Token{Token::EOS, StringView()};\n}\n\n\nconst TypeAst* ParseTypeName(const std::string& type_name) {\n // Cache for type_name.\n // Usually we won't have too many type names in the cache, so do not try to\n // limit cache size.\n static std::map ast_cache;\n static std::mutex lock;\n\n std::lock_guard guard(lock);\n auto it = ast_cache.find(type_name);\n if (it != ast_cache.end()) {\n return &it->second;\n }\n\n auto& ast = ast_cache[type_name];\n if (TypeParser(type_name).Parse(&ast)) {\n return *\n }\n ast_cache.erase(type_name);\n return nullptr;\n}\n\n}\n", "meta": {"hash_id": "3646c0d73dd1837525da158bc9a513e52e8f341bbd45fa79a7814706e2ab91eb"}}]}], "golden_chunks": [{"doc_uuid": "8001dcf16ed7af29411a65852c128e572d3ca6697c0f70ae4e1018147047ea69", "index": 3, "content": "template \ninline int CompateStringsCaseInsensitive(const L& left, const R& right) {\n int64_t size_diff = left.size() - right.size();\n if (size_diff != 0)\n return size_diff > 0 ? 1 : -1;\n\n#if defined _win_\n return _strnicmp(left.data(), right.data(), left.size());\n#else\n return strncasecmp(left.data(), right.data(), left.size());\n#endif\n}\n\nstatic Type::Code GetTypeCode(const std::string& name) {\n auto it = kTypeCode.find(name);\n if (it != kTypeCode.end()) {\n return it->second;\n }\n\n return Type::Void;\n}\n\nstatic TypeAst::Meta GetTypeMeta(const StringView& name) {\n if (name == \"Array\") {\n return TypeAst::Array;\n }\n\n", "meta": {"hash_id": "1692409c14d9a59a83bf7d23ea1cb3c3d3419942cb43c7bc3082703534042c26"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/types/type_parser.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 14, "source": "codebases/jsonl"}} +{"query": "What regular expression syntax is supported on Windows and Mac for death tests?", "answer": "On Windows and Mac, only a simple regex syntax implemented as part of Google Test is supported. As the comment explains: \n\"On other platforms (e.g. Windows or Mac), we only support a simple regex syntax implemented as part of Google Test. 
This limited implementation should be enough most of the time when writing death tests; though it lacks many features you can find in PCRE or POSIX extended regex syntax.\"\nThe supported syntax is then documented in detail in the comments.", "golden_doc_uuids": ["e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1"], "golden_chunk_uuids": [["e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", 5]], "golden_documents": [{"uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n// * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n// * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the public API for death tests. It is\n// #included by gtest.h so a user doesn't need to include this\n// directly.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n#define GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n\n#include \"gtest/internal/gtest-death-test-internal.h\"\n\nnamespace testing {\n\n// This flag controls the style of death tests. Valid values are \"threadsafe\",\n// meaning that the death test child process will re-execute the test binary\n// from the start, running only a single death test, or \"fast\",\n// meaning that the child process will execute the test logic immediately\n// after forking.\nGTEST_DECLARE_string_(death_test_style);\n\n#if GTEST_HAS_DEATH_TEST\n\nnamespace internal {\n\n// Returns a Boolean value indicating whether the caller is currently\n// executing in the context of the death test child process. Tools such as\n// Valgrind heap checkers may need this to modify their behavior in death\n// tests. IMPORTANT: This is an internal utility. Using it may break the\n// implementation of death tests. 
User code MUST NOT use it.\nGTEST_API_ bool InDeathTestChild();\n\n} // namespace internal\n\n// The following macros are useful for writing death tests.\n\n// Here's what happens when an ASSERT_DEATH* or EXPECT_DEATH* is\n// executed:\n//\n// 1. It generates a warning if there is more than one active\n// thread. This is because it's safe to fork() or clone() only\n// when there is a single thread.\n//\n// 2. The parent process clone()s a sub-process and runs the death\n// test in it; the sub-process exits with code 0 at the end of the\n// death test, if it hasn't exited already.\n//\n// 3. The parent process waits for the sub-process to terminate.\n//\n// 4. The parent process checks the exit code and error message of\n// the sub-process.\n//\n// Examples:\n//\n// ASSERT_DEATH(server.SendMessage(56, \"Hello\"), \"Invalid port number\");\n// for (int i = 0; i < 5; i++) {\n// EXPECT_DEATH(server.ProcessRequest(i),\n// \"Invalid request .* in ProcessRequest()\")\n// << \"Failed to die on request \" << i;\n// }\n//\n// ASSERT_EXIT(server.ExitNow(), ::testing::ExitedWithCode(0), \"Exiting\");\n//\n// bool KilledBySIGHUP(int exit_code) {\n// return WIFSIGNALED(exit_code) && WTERMSIG(exit_code) == SIGHUP;\n// }\n//\n// ASSERT_EXIT(client.HangUpServer(), KilledBySIGHUP, \"Hanging up!\");\n//\n// The final parameter to each of these macros is a matcher applied to any data\n// the sub-process wrote to stderr. For compatibility with existing tests, a\n// bare string is interpreted as a regular expression matcher.\n//\n// On the regular expressions used in death tests:\n//\n// GOOGLETEST_CM0005 DO NOT DELETE\n// On POSIX-compliant systems (*nix), we use the library,\n// which uses the POSIX extended regex syntax.\n//\n// On other platforms (e.g. Windows or Mac), we only support a simple regex\n// syntax implemented as part of Google Test. This limited\n// implementation should be enough most of the time when writing\n// death tests; though it lacks many features you can find in PCRE\n// or POSIX extended regex syntax. For example, we don't support\n// union (\"x|y\"), grouping (\"(xy)\"), brackets (\"[xy]\"), and\n// repetition count (\"x{5,7}\"), among others.\n//\n// Below is the syntax that we do support. We chose it to be a\n// subset of both PCRE and POSIX extended regex, so it's easy to\n// learn wherever you come from. In the following: 'A' denotes a\n// literal character, period (.), or a single \\\\ escape sequence;\n// 'x' and 'y' denote regular expressions; 'm' and 'n' are for\n// natural numbers.\n//\n// c matches any literal character c\n// \\\\d matches any decimal digit\n// \\\\D matches any character that's not a decimal digit\n// \\\\f matches \\f\n// \\\\n matches \\n\n// \\\\r matches \\r\n// \\\\s matches any ASCII whitespace, including \\n\n// \\\\S matches any character that's not a whitespace\n// \\\\t matches \\t\n// \\\\v matches \\v\n// \\\\w matches any letter, _, or decimal digit\n// \\\\W matches any character that \\\\w doesn't match\n// \\\\c matches any literal character c, which must be a punctuation\n// . matches any single character except \\n\n// A? matches 0 or 1 occurrences of A\n// A* matches 0 or many occurrences of A\n// A+ matches 1 or many occurrences of A\n// ^ matches the beginning of a string (not that of each line)\n// $ matches the end of a string (not that of each line)\n// xy matches x followed by y\n//\n// If you accidentally use PCRE or POSIX extended regex features\n// not implemented by us, you will get a run-time failure. 
In that\n// case, please try to rewrite your regular expression within the\n// above syntax.\n//\n// This implementation is *not* meant to be as highly tuned or robust\n// as a compiled regex library, but should perform well enough for a\n// death test, which already incurs significant overhead by launching\n// a child process.\n//\n// Known caveats:\n//\n// A \"threadsafe\" style death test obtains the path to the test\n// program from argv[0] and re-executes it in the sub-process. For\n// simplicity, the current implementation doesn't search the PATH\n// when launching the sub-process. This means that the user must\n// invoke the test program via a path that contains at least one\n// path separator (e.g. path/to/foo_test and\n// /absolute/path/to/bar_test are fine, but foo_test is not). This\n// is rarely a problem as people usually don't put the test binary\n// directory in PATH.\n//\n\n// Asserts that a given `statement` causes the program to exit, with an\n// integer exit status that satisfies `predicate`, and emitting error output\n// that matches `matcher`.\n# define ASSERT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_FATAL_FAILURE_)\n\n// Like `ASSERT_EXIT`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_NONFATAL_FAILURE_)\n\n// Asserts that a given `statement` causes the program to exit, either by\n// explicitly exiting with a nonzero exit code or being killed by a\n// signal, and emitting error output that matches `matcher`.\n# define ASSERT_DEATH(statement, matcher) \\\n ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Like `ASSERT_DEATH`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_DEATH(statement, matcher) \\\n EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Two predicate classes that can be used in {ASSERT,EXPECT}_EXIT*:\n\n// Tests that an exit code describes a normal exit with a given exit code.\nclass GTEST_API_ ExitedWithCode {\n public:\n explicit ExitedWithCode(int exit_code);\n ExitedWithCode(const ExitedWithCode&) = default;\n void operator=(const ExitedWithCode& other) = delete;\n bool operator()(int exit_status) const;\n private:\n const int exit_code_;\n};\n\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n// Tests that an exit code describes an exit due to termination by a\n// given signal.\n// GOOGLETEST_CM0006 DO NOT DELETE\nclass GTEST_API_ KilledBySignal {\n public:\n explicit KilledBySignal(int signum);\n bool operator()(int exit_status) const;\n private:\n const int signum_;\n};\n# endif // !GTEST_OS_WINDOWS\n\n// EXPECT_DEBUG_DEATH asserts that the given statements die in debug mode.\n// The death testing framework causes this to have interesting semantics,\n// since the sideeffects of the call are only visible in opt mode, and not\n// in debug mode.\n//\n// In practice, this can be used to test functions that utilize the\n// LOG(DFATAL) macro using the following style:\n//\n// int DieInDebugOr12(int* sideeffect) {\n// if (sideeffect) {\n// *sideeffect = 12;\n// }\n// LOG(DFATAL) << \"death\";\n// return 12;\n// }\n//\n// TEST(TestSuite, TestDieOr12WorksInDgbAndOpt) {\n// int sideeffect = 0;\n// // Only asserts in dbg.\n// EXPECT_DEBUG_DEATH(DieInDebugOr12(&sideeffect), \"death\");\n//\n// #ifdef NDEBUG\n// // opt-mode has sideeffect visible.\n// EXPECT_EQ(12, 
sideeffect);\n// #else\n// // dbg-mode no visible sideeffect.\n// EXPECT_EQ(0, sideeffect);\n// #endif\n// }\n//\n// This will assert that DieInDebugReturn12InOpt() crashes in debug\n// mode, usually due to a DCHECK or LOG(DFATAL), but returns the\n// appropriate fallback value (12 in this case) in opt mode. If you\n// need to test that a function has appropriate side-effects in opt\n// mode, include assertions against the side-effects. A general\n// pattern for this is:\n//\n// EXPECT_DEBUG_DEATH({\n// // Side-effects here will have an effect after this statement in\n// // opt mode, but none in debug mode.\n// EXPECT_EQ(12, DieInDebugOr12(&sideeffect));\n// }, \"death\");\n//\n# ifdef NDEBUG\n\n# define EXPECT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# else\n\n# define EXPECT_DEBUG_DEATH(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n\n# endif // NDEBUG for EXPECT_DEBUG_DEATH\n#endif // GTEST_HAS_DEATH_TEST\n\n// This macro is used for implementing macros such as\n// EXPECT_DEATH_IF_SUPPORTED and ASSERT_DEATH_IF_SUPPORTED on systems where\n// death tests are not supported. Those macros must compile on such systems\n// if and only if EXPECT_DEATH and ASSERT_DEATH compile with the same parameters\n// on systems that support death tests. This allows one to write such a macro on\n// a system that does not support death tests and be sure that it will compile\n// on a death-test supporting system. It is exposed publicly so that systems\n// that have death-tests with stricter requirements than GTEST_HAS_DEATH_TEST\n// can write their own equivalent of EXPECT_DEATH_IF_SUPPORTED and\n// ASSERT_DEATH_IF_SUPPORTED.\n//\n// Parameters:\n// statement - A statement that a macro such as EXPECT_DEATH would test\n// for program termination. This macro has to make sure this\n// statement is compiled but not executed, to ensure that\n// EXPECT_DEATH_IF_SUPPORTED compiles with a certain\n// parameter if and only if EXPECT_DEATH compiles with it.\n// regex - A regex that a macro such as EXPECT_DEATH would use to test\n// the output of statement. This parameter has to be\n// compiled but not evaluated by this macro, to ensure that\n// this macro only accepts expressions that a macro such as\n// EXPECT_DEATH would accept.\n// terminator - Must be an empty statement for EXPECT_DEATH_IF_SUPPORTED\n// and a return statement for ASSERT_DEATH_IF_SUPPORTED.\n// This ensures that ASSERT_DEATH_IF_SUPPORTED will not\n// compile inside functions where ASSERT_DEATH doesn't\n// compile.\n//\n// The branch that has an always false condition is used to ensure that\n// statement and regex are compiled (and thus syntactically correct) but\n// never executed. The unreachable code macro protects the terminator\n// statement from generating an 'unreachable code' warning in case\n// statement unconditionally returns or throws. 
The Message constructor at\n// the end allows the syntax of streaming additional messages into the\n// macro, for compilational compatibility with EXPECT_DEATH/ASSERT_DEATH.\n# define GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, terminator) \\\n GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n if (::testing::internal::AlwaysTrue()) { \\\n GTEST_LOG_(WARNING) \\\n << \"Death tests are not supported on this platform.\\n\" \\\n << \"Statement '\" #statement \"' cannot be verified.\"; \\\n } else if (::testing::internal::AlwaysFalse()) { \\\n ::testing::internal::RE::PartialMatch(\".*\", (regex)); \\\n GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n terminator; \\\n } else \\\n ::testing::Message()\n\n// EXPECT_DEATH_IF_SUPPORTED(statement, regex) and\n// ASSERT_DEATH_IF_SUPPORTED(statement, regex) expand to real death tests if\n// death tests are supported; otherwise they just issue a warning. This is\n// useful when you are combining death test assertions with normal test\n// assertions in one test.\n#if GTEST_HAS_DEATH_TEST\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n#else\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, )\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, return)\n#endif\n\n} // namespace testing\n\n#endif // GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n", "meta": {"doctype": "codebase", "relative_path": "/contrib/gtest/include/gtest/gtest-death-test.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 21, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 0, "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n// * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n// * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n", "meta": {"hash_id": "fa2481c972f8fad58ae5c7b4d0de7e235fa2dae176c8fb7e8538af44f7958f4c"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 1, "content": "// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n", "meta": {"hash_id": "afa17bcf8d37fb9aaf4a2dc8fed7487226e40dce270804f0321566ca06a3e70f"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 2, "content": "//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the public API for death tests. It is\n// #included by gtest.h so a user doesn't need to include this\n// directly.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n#define GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n\n#include \"gtest/internal/gtest-death-test-internal.h\"\n\nnamespace testing {\n\n// This flag controls the style of death tests. Valid values are \"threadsafe\",\n// meaning that the death test child process will re-execute the test binary\n// from the start, running only a single death test, or \"fast\",\n// meaning that the child process will execute the test logic immediately\n// after forking.\nGTEST_DECLARE_string_(death_test_style);\n\n#if GTEST_HAS_DEATH_TEST\n\nnamespace internal {\n\n", "meta": {"hash_id": "63c2ea58f8032f3482f1ea51a33216aa58871210c005c97cb7b7dd19393eba71"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 3, "content": "// Returns a Boolean value indicating whether the caller is currently\n// executing in the context of the death test child process. Tools such as\n// Valgrind heap checkers may need this to modify their behavior in death\n// tests. IMPORTANT: This is an internal utility. Using it may break the\n// implementation of death tests. User code MUST NOT use it.\nGTEST_API_ bool InDeathTestChild();\n\n} // namespace internal\n\n// The following macros are useful for writing death tests.\n\n// Here's what happens when an ASSERT_DEATH* or EXPECT_DEATH* is\n// executed:\n//\n// 1. It generates a warning if there is more than one active\n// thread. This is because it's safe to fork() or clone() only\n// when there is a single thread.\n//\n// 2. The parent process clone()s a sub-process and runs the death\n// test in it; the sub-process exits with code 0 at the end of the\n// death test, if it hasn't exited already.\n//\n", "meta": {"hash_id": "8faafeed3d01563e90b5c455dca66476f207d5e4833040c8cbb8dee710ef628b"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 4, "content": "// 3. The parent process waits for the sub-process to terminate.\n//\n// 4. 
The parent process checks the exit code and error message of\n// the sub-process.\n//\n// Examples:\n//\n// ASSERT_DEATH(server.SendMessage(56, \"Hello\"), \"Invalid port number\");\n// for (int i = 0; i < 5; i++) {\n// EXPECT_DEATH(server.ProcessRequest(i),\n// \"Invalid request .* in ProcessRequest()\")\n// << \"Failed to die on request \" << i;\n// }\n//\n// ASSERT_EXIT(server.ExitNow(), ::testing::ExitedWithCode(0), \"Exiting\");\n//\n// bool KilledBySIGHUP(int exit_code) {\n// return WIFSIGNALED(exit_code) && WTERMSIG(exit_code) == SIGHUP;\n// }\n//\n// ASSERT_EXIT(client.HangUpServer(), KilledBySIGHUP, \"Hanging up!\");\n//\n// The final parameter to each of these macros is a matcher applied to any data\n", "meta": {"hash_id": "b9270f2c6e305d677bef6bbcbb29b67f16336bbc5555990793c6f8b0ad070edb"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 5, "content": "// the sub-process wrote to stderr. For compatibility with existing tests, a\n// bare string is interpreted as a regular expression matcher.\n//\n// On the regular expressions used in death tests:\n//\n// GOOGLETEST_CM0005 DO NOT DELETE\n// On POSIX-compliant systems (*nix), we use the <regex.h> library,\n// which uses the POSIX extended regex syntax.\n//\n// On other platforms (e.g. Windows or Mac), we only support a simple regex\n// syntax implemented as part of Google Test. This limited\n// implementation should be enough most of the time when writing\n// death tests; though it lacks many features you can find in PCRE\n// or POSIX extended regex syntax. For example, we don't support\n", "meta": {"hash_id": "a52d5c4237dc69d0f5d2ce610bbc61529725038fb6c9ac6b47342a62f1c6c819"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 6, "content": "// union (\"x|y\"), grouping (\"(xy)\"), brackets (\"[xy]\"), and\n// repetition count (\"x{5,7}\"), among others.\n//\n// Below is the syntax that we do support. We chose it to be a\n// subset of both PCRE and POSIX extended regex, so it's easy to\n// learn wherever you come from. In the following: 'A' denotes a\n// literal character, period (.), or a single \\\\ escape sequence;\n// 'x' and 'y' denote regular expressions; 'm' and 'n' are for\n", "meta": {"hash_id": "d9a12f8cceac4a0d9004520eb1f31b79c4f6dd08f4dc87efbe2aea0b67f2c017"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 7, "content": "// natural numbers.\n//\n// c matches any literal character c\n// \\\\d matches any decimal digit\n// \\\\D matches any character that's not a decimal digit\n// \\\\f matches \\f\n// \\\\n matches \\n\n// \\\\r matches \\r\n// \\\\s matches any ASCII whitespace, including \\n\n// \\\\S matches any character that's not a whitespace\n// \\\\t matches \\t\n// \\\\v matches \\v\n// \\\\w matches any letter, _, or decimal digit\n// \\\\W matches any character that \\\\w doesn't match\n// \\\\c matches any literal character c, which must be a punctuation\n// . matches any single character except \\n\n", "meta": {"hash_id": "3b4e8c3333a501e0720b06437caf5d6439e94a195a5b4dae7e3b108118cd0489"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 8, "content": "// A? 
matches 0 or 1 occurrences of A\n// A* matches 0 or many occurrences of A\n// A+ matches 1 or many occurrences of A\n// ^ matches the beginning of a string (not that of each line)\n// $ matches the end of a string (not that of each line)\n// xy matches x followed by y\n//\n// If you accidentally use PCRE or POSIX extended regex features\n// not implemented by us, you will get a run-time failure. In that\n", "meta": {"hash_id": "a3aea3e320800377af9d24bec495f5fc137995b8e1f734110db028ce11ba5bfe"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 9, "content": "// case, please try to rewrite your regular expression within the\n// above syntax.\n//\n// This implementation is *not* meant to be as highly tuned or robust\n// as a compiled regex library, but should perform well enough for a\n// death test, which already incurs significant overhead by launching\n// a child process.\n//\n// Known caveats:\n//\n// A \"threadsafe\" style death test obtains the path to the test\n// program from argv[0] and re-executes it in the sub-process. For\n// simplicity, the current implementation doesn't search the PATH\n// when launching the sub-process. This means that the user must\n// invoke the test program via a path that contains at least one\n// path separator (e.g. path/to/foo_test and\n// /absolute/path/to/bar_test are fine, but foo_test is not). This\n// is rarely a problem as people usually don't put the test binary\n// directory in PATH.\n//\n\n", "meta": {"hash_id": "88b9a5dcf47b4538e789a5847d0f3e4cf0452876acc5be15a49630b556152bff"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 10, "content": "// Asserts that a given `statement` causes the program to exit, with an\n// integer exit status that satisfies `predicate`, and emitting error output\n// that matches `matcher`.\n# define ASSERT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_FATAL_FAILURE_)\n\n// Like `ASSERT_EXIT`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_NONFATAL_FAILURE_)\n\n", "meta": {"hash_id": "41aadf4a7c3ae07804a5a467688923c0563c5bf8334d844238baee8d4cd8df2f"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 11, "content": "// Asserts that a given `statement` causes the program to exit, either by\n// explicitly exiting with a nonzero exit code or being killed by a\n// signal, and emitting error output that matches `matcher`.\n# define ASSERT_DEATH(statement, matcher) \\\n ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Like `ASSERT_DEATH`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_DEATH(statement, matcher) \\\n EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Two predicate classes that can be used in {ASSERT,EXPECT}_EXIT*:\n\n", "meta": {"hash_id": "138d3bb5a0fc59b9f7f8672be7479839902f74e372e46f05ff1640afb1fa3089"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 12, "content": "// Tests that an exit code describes a normal exit with a given exit code.\nclass GTEST_API_ ExitedWithCode {\n public:\n explicit ExitedWithCode(int exit_code);\n ExitedWithCode(const ExitedWithCode&) = default;\n void operator=(const ExitedWithCode& other) = delete;\n bool operator()(int 
exit_status) const;\n private:\n const int exit_code_;\n};\n\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n// Tests that an exit code describes an exit due to termination by a\n// given signal.\n// GOOGLETEST_CM0006 DO NOT DELETE\nclass GTEST_API_ KilledBySignal {\n public:\n explicit KilledBySignal(int signum);\n bool operator()(int exit_status) const;\n private:\n const int signum_;\n};\n# endif // !GTEST_OS_WINDOWS\n\n", "meta": {"hash_id": "7ba168e83774cabf4ee9ff86d68945e3bf1f74dc84f3ca66b3fb101d8652431b"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 13, "content": "// EXPECT_DEBUG_DEATH asserts that the given statements die in debug mode.\n// The death testing framework causes this to have interesting semantics,\n// since the sideeffects of the call are only visible in opt mode, and not\n// in debug mode.\n//\n// In practice, this can be used to test functions that utilize the\n// LOG(DFATAL) macro using the following style:\n//\n// int DieInDebugOr12(int* sideeffect) {\n// if (sideeffect) {\n// *sideeffect = 12;\n// }\n", "meta": {"hash_id": "d9a2f73d5006016ff9c037bcc326ee5f8420fb1ad140d1c1afb16c0943bda070"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 14, "content": "// LOG(DFATAL) << \"death\";\n// return 12;\n// }\n//\n// TEST(TestSuite, TestDieOr12WorksInDgbAndOpt) {\n// int sideeffect = 0;\n// // Only asserts in dbg.\n// EXPECT_DEBUG_DEATH(DieInDebugOr12(&sideeffect), \"death\");\n//\n// #ifdef NDEBUG\n// // opt-mode has sideeffect visible.\n// EXPECT_EQ(12, sideeffect);\n// #else\n// // dbg-mode no visible sideeffect.\n// EXPECT_EQ(0, sideeffect);\n// #endif\n// }\n//\n// This will assert that DieInDebugReturn12InOpt() crashes in debug\n// mode, usually due to a DCHECK or LOG(DFATAL), but returns the\n// appropriate fallback value (12 in this case) in opt mode. If you\n// need to test that a function has appropriate side-effects in opt\n// mode, include assertions against the side-effects. A general\n// pattern for this is:\n//\n// EXPECT_DEBUG_DEATH({\n// // Side-effects here will have an effect after this statement in\n// // opt mode, but none in debug mode.\n// EXPECT_EQ(12, DieInDebugOr12(&sideeffect));\n// }, \"death\");\n//\n# ifdef NDEBUG\n\n", "meta": {"hash_id": "5a133c53c5d7a6eaaefbf4352e0843ae98a4aa0251e57f59c011b1fb6e968ae8"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 15, "content": "# define EXPECT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# else\n\n# define EXPECT_DEBUG_DEATH(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n\n# endif // NDEBUG for EXPECT_DEBUG_DEATH\n#endif // GTEST_HAS_DEATH_TEST\n\n", "meta": {"hash_id": "d9c256422a5183912999c081ff0fb308a7f7a3bca4fc86cb214a99f2bd16756a"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 16, "content": "// This macro is used for implementing macros such as\n// EXPECT_DEATH_IF_SUPPORTED and ASSERT_DEATH_IF_SUPPORTED on systems where\n// death tests are not supported. Those macros must compile on such systems\n// if and only if EXPECT_DEATH and ASSERT_DEATH compile with the same parameters\n// on systems that support death tests. 
This allows one to write such a macro on\n// a system that does not support death tests and be sure that it will compile\n// on a death-test supporting system. It is exposed publicly so that systems\n// that have death-tests with stricter requirements than GTEST_HAS_DEATH_TEST\n", "meta": {"hash_id": "f5642735c8d82b78bd23e4fa94190014ea4444b22b8f9cf3af6b0d17040e7992"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 17, "content": "// can write their own equivalent of EXPECT_DEATH_IF_SUPPORTED and\n// ASSERT_DEATH_IF_SUPPORTED.\n//\n// Parameters:\n// statement - A statement that a macro such as EXPECT_DEATH would test\n// for program termination. This macro has to make sure this\n// statement is compiled but not executed, to ensure that\n// EXPECT_DEATH_IF_SUPPORTED compiles with a certain\n// parameter if and only if EXPECT_DEATH compiles with it.\n// regex - A regex that a macro such as EXPECT_DEATH would use to test\n", "meta": {"hash_id": "cc76ee2fa09c6cb8d2187311dbdef1b4dde4b9e68d59ce0149e8021ddf11e76d"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 18, "content": "// the output of statement. This parameter has to be\n// compiled but not evaluated by this macro, to ensure that\n// this macro only accepts expressions that a macro such as\n// EXPECT_DEATH would accept.\n// terminator - Must be an empty statement for EXPECT_DEATH_IF_SUPPORTED\n// and a return statement for ASSERT_DEATH_IF_SUPPORTED.\n// This ensures that ASSERT_DEATH_IF_SUPPORTED will not\n// compile inside functions where ASSERT_DEATH doesn't\n// compile.\n//\n// The branch that has an always false condition is used to ensure that\n// statement and regex are compiled (and thus syntactically correct) but\n// never executed. The unreachable code macro protects the terminator\n// statement from generating an 'unreachable code' warning in case\n// statement unconditionally returns or throws. The Message constructor at\n", "meta": {"hash_id": "8607d0e71e69552252a3d7629615998279f551069b1edcd2d033f82948c3b7a4"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 19, "content": "// the end allows the syntax of streaming additional messages into the\n// macro, for compilational compatibility with EXPECT_DEATH/ASSERT_DEATH.\n# define GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, terminator) \\\n GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n if (::testing::internal::AlwaysTrue()) { \\\n GTEST_LOG_(WARNING) \\\n << \"Death tests are not supported on this platform.\\n\" \\\n << \"Statement '\" #statement \"' cannot be verified.\"; \\\n } else if (::testing::internal::AlwaysFalse()) { \\\n ::testing::internal::RE::PartialMatch(\".*\", (regex)); \\\n GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n terminator; \\\n } else \\\n ::testing::Message()\n\n", "meta": {"hash_id": "8e787174aa745ba646fdf35a44c2979941a6699278d01aa68636794183c76b84"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 20, "content": "// EXPECT_DEATH_IF_SUPPORTED(statement, regex) and\n// ASSERT_DEATH_IF_SUPPORTED(statement, regex) expand to real death tests if\n// death tests are supported; otherwise they just issue a warning. 
This is\n// useful when you are combining death test assertions with normal test\n// assertions in one test.\n#if GTEST_HAS_DEATH_TEST\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n#else\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, )\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, return)\n#endif\n\n} // namespace testing\n\n#endif // GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n", "meta": {"hash_id": "a19a8153685c9a33b21051d7236a7a14679fdbabe4ba62c305689dd0faafc50c"}}]}], "golden_chunks": [{"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 5, "content": "// the sub-process wrote to stderr. For compatibility with existing tests, a\n// bare string is interpreted as a regular expression matcher.\n//\n// On the regular expressions used in death tests:\n//\n// GOOGLETEST_CM0005 DO NOT DELETE\n// On POSIX-compliant systems (*nix), we use the <regex.h> library,\n// which uses the POSIX extended regex syntax.\n//\n// On other platforms (e.g. Windows or Mac), we only support a simple regex\n// syntax implemented as part of Google Test. This limited\n// implementation should be enough most of the time when writing\n// death tests; though it lacks many features you can find in PCRE\n// or POSIX extended regex syntax. For example, we don't support\n", "meta": {"hash_id": "a52d5c4237dc69d0f5d2ce610bbc61529725038fb6c9ac6b47342a62f1c6c819"}}], "meta": {"doctype": "codebase", "relative_path": "/contrib/gtest/include/gtest/gtest-death-test.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 21, "source": "codebases/jsonl"}} +{"query": "What is a known caveat with \"threadsafe\" style death tests?", "answer": "One known caveat with \"threadsafe\" style death tests is that the test program must be invoked via a path that contains at least one path separator. As the comment states:\n\"A 'threadsafe' style death test obtains the path to the test program from argv[0] and re-executes it in the sub-process. For simplicity, the current implementation doesn't search the PATH when launching the sub-process. This means that the user must invoke the test program via a path that contains at least one path separator (e.g. path/to/foo_test and /absolute/path/to/bar_test are fine, but foo_test is not).\"", "golden_doc_uuids": ["e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1"], "golden_chunk_uuids": [["e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", 9]], "golden_documents": [{"uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n// * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n// * Neither the name of Google Inc. 
nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the public API for death tests. It is\n// #included by gtest.h so a user doesn't need to include this\n// directly.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n#define GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n\n#include \"gtest/internal/gtest-death-test-internal.h\"\n\nnamespace testing {\n\n// This flag controls the style of death tests. Valid values are \"threadsafe\",\n// meaning that the death test child process will re-execute the test binary\n// from the start, running only a single death test, or \"fast\",\n// meaning that the child process will execute the test logic immediately\n// after forking.\nGTEST_DECLARE_string_(death_test_style);\n\n#if GTEST_HAS_DEATH_TEST\n\nnamespace internal {\n\n// Returns a Boolean value indicating whether the caller is currently\n// executing in the context of the death test child process. Tools such as\n// Valgrind heap checkers may need this to modify their behavior in death\n// tests. IMPORTANT: This is an internal utility. Using it may break the\n// implementation of death tests. User code MUST NOT use it.\nGTEST_API_ bool InDeathTestChild();\n\n} // namespace internal\n\n// The following macros are useful for writing death tests.\n\n// Here's what happens when an ASSERT_DEATH* or EXPECT_DEATH* is\n// executed:\n//\n// 1. It generates a warning if there is more than one active\n// thread. This is because it's safe to fork() or clone() only\n// when there is a single thread.\n//\n// 2. The parent process clone()s a sub-process and runs the death\n// test in it; the sub-process exits with code 0 at the end of the\n// death test, if it hasn't exited already.\n//\n// 3. The parent process waits for the sub-process to terminate.\n//\n// 4. 
The parent process checks the exit code and error message of\n// the sub-process.\n//\n// Examples:\n//\n// ASSERT_DEATH(server.SendMessage(56, \"Hello\"), \"Invalid port number\");\n// for (int i = 0; i < 5; i++) {\n// EXPECT_DEATH(server.ProcessRequest(i),\n// \"Invalid request .* in ProcessRequest()\")\n// << \"Failed to die on request \" << i;\n// }\n//\n// ASSERT_EXIT(server.ExitNow(), ::testing::ExitedWithCode(0), \"Exiting\");\n//\n// bool KilledBySIGHUP(int exit_code) {\n// return WIFSIGNALED(exit_code) && WTERMSIG(exit_code) == SIGHUP;\n// }\n//\n// ASSERT_EXIT(client.HangUpServer(), KilledBySIGHUP, \"Hanging up!\");\n//\n// The final parameter to each of these macros is a matcher applied to any data\n// the sub-process wrote to stderr. For compatibility with existing tests, a\n// bare string is interpreted as a regular expression matcher.\n//\n// On the regular expressions used in death tests:\n//\n// GOOGLETEST_CM0005 DO NOT DELETE\n// On POSIX-compliant systems (*nix), we use the <regex.h> library,\n// which uses the POSIX extended regex syntax.\n//\n// On other platforms (e.g. Windows or Mac), we only support a simple regex\n// syntax implemented as part of Google Test. This limited\n// implementation should be enough most of the time when writing\n// death tests; though it lacks many features you can find in PCRE\n// or POSIX extended regex syntax. For example, we don't support\n// union (\"x|y\"), grouping (\"(xy)\"), brackets (\"[xy]\"), and\n// repetition count (\"x{5,7}\"), among others.\n//\n// Below is the syntax that we do support. We chose it to be a\n// subset of both PCRE and POSIX extended regex, so it's easy to\n// learn wherever you come from. In the following: 'A' denotes a\n// literal character, period (.), or a single \\\\ escape sequence;\n// 'x' and 'y' denote regular expressions; 'm' and 'n' are for\n// natural numbers.\n//\n// c matches any literal character c\n// \\\\d matches any decimal digit\n// \\\\D matches any character that's not a decimal digit\n// \\\\f matches \\f\n// \\\\n matches \\n\n// \\\\r matches \\r\n// \\\\s matches any ASCII whitespace, including \\n\n// \\\\S matches any character that's not a whitespace\n// \\\\t matches \\t\n// \\\\v matches \\v\n// \\\\w matches any letter, _, or decimal digit\n// \\\\W matches any character that \\\\w doesn't match\n// \\\\c matches any literal character c, which must be a punctuation\n// . matches any single character except \\n\n// A? matches 0 or 1 occurrences of A\n// A* matches 0 or many occurrences of A\n// A+ matches 1 or many occurrences of A\n// ^ matches the beginning of a string (not that of each line)\n// $ matches the end of a string (not that of each line)\n// xy matches x followed by y\n//\n// If you accidentally use PCRE or POSIX extended regex features\n// not implemented by us, you will get a run-time failure. In that\n// case, please try to rewrite your regular expression within the\n// above syntax.\n//\n// This implementation is *not* meant to be as highly tuned or robust\n// as a compiled regex library, but should perform well enough for a\n// death test, which already incurs significant overhead by launching\n// a child process.\n//\n// Known caveats:\n//\n// A \"threadsafe\" style death test obtains the path to the test\n// program from argv[0] and re-executes it in the sub-process. For\n// simplicity, the current implementation doesn't search the PATH\n// when launching the sub-process. 
This means that the user must\n// invoke the test program via a path that contains at least one\n// path separator (e.g. path/to/foo_test and\n// /absolute/path/to/bar_test are fine, but foo_test is not). This\n// is rarely a problem as people usually don't put the test binary\n// directory in PATH.\n//\n\n// Asserts that a given `statement` causes the program to exit, with an\n// integer exit status that satisfies `predicate`, and emitting error output\n// that matches `matcher`.\n# define ASSERT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_FATAL_FAILURE_)\n\n// Like `ASSERT_EXIT`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_NONFATAL_FAILURE_)\n\n// Asserts that a given `statement` causes the program to exit, either by\n// explicitly exiting with a nonzero exit code or being killed by a\n// signal, and emitting error output that matches `matcher`.\n# define ASSERT_DEATH(statement, matcher) \\\n ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Like `ASSERT_DEATH`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_DEATH(statement, matcher) \\\n EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Two predicate classes that can be used in {ASSERT,EXPECT}_EXIT*:\n\n// Tests that an exit code describes a normal exit with a given exit code.\nclass GTEST_API_ ExitedWithCode {\n public:\n explicit ExitedWithCode(int exit_code);\n ExitedWithCode(const ExitedWithCode&) = default;\n void operator=(const ExitedWithCode& other) = delete;\n bool operator()(int exit_status) const;\n private:\n const int exit_code_;\n};\n\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n// Tests that an exit code describes an exit due to termination by a\n// given signal.\n// GOOGLETEST_CM0006 DO NOT DELETE\nclass GTEST_API_ KilledBySignal {\n public:\n explicit KilledBySignal(int signum);\n bool operator()(int exit_status) const;\n private:\n const int signum_;\n};\n# endif // !GTEST_OS_WINDOWS\n\n// EXPECT_DEBUG_DEATH asserts that the given statements die in debug mode.\n// The death testing framework causes this to have interesting semantics,\n// since the sideeffects of the call are only visible in opt mode, and not\n// in debug mode.\n//\n// In practice, this can be used to test functions that utilize the\n// LOG(DFATAL) macro using the following style:\n//\n// int DieInDebugOr12(int* sideeffect) {\n// if (sideeffect) {\n// *sideeffect = 12;\n// }\n// LOG(DFATAL) << \"death\";\n// return 12;\n// }\n//\n// TEST(TestSuite, TestDieOr12WorksInDgbAndOpt) {\n// int sideeffect = 0;\n// // Only asserts in dbg.\n// EXPECT_DEBUG_DEATH(DieInDebugOr12(&sideeffect), \"death\");\n//\n// #ifdef NDEBUG\n// // opt-mode has sideeffect visible.\n// EXPECT_EQ(12, sideeffect);\n// #else\n// // dbg-mode no visible sideeffect.\n// EXPECT_EQ(0, sideeffect);\n// #endif\n// }\n//\n// This will assert that DieInDebugReturn12InOpt() crashes in debug\n// mode, usually due to a DCHECK or LOG(DFATAL), but returns the\n// appropriate fallback value (12 in this case) in opt mode. If you\n// need to test that a function has appropriate side-effects in opt\n// mode, include assertions against the side-effects. 
A general\n// pattern for this is:\n//\n// EXPECT_DEBUG_DEATH({\n// // Side-effects here will have an effect after this statement in\n// // opt mode, but none in debug mode.\n// EXPECT_EQ(12, DieInDebugOr12(&sideeffect));\n// }, \"death\");\n//\n# ifdef NDEBUG\n\n# define EXPECT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# else\n\n# define EXPECT_DEBUG_DEATH(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n\n# endif // NDEBUG for EXPECT_DEBUG_DEATH\n#endif // GTEST_HAS_DEATH_TEST\n\n// This macro is used for implementing macros such as\n// EXPECT_DEATH_IF_SUPPORTED and ASSERT_DEATH_IF_SUPPORTED on systems where\n// death tests are not supported. Those macros must compile on such systems\n// if and only if EXPECT_DEATH and ASSERT_DEATH compile with the same parameters\n// on systems that support death tests. This allows one to write such a macro on\n// a system that does not support death tests and be sure that it will compile\n// on a death-test supporting system. It is exposed publicly so that systems\n// that have death-tests with stricter requirements than GTEST_HAS_DEATH_TEST\n// can write their own equivalent of EXPECT_DEATH_IF_SUPPORTED and\n// ASSERT_DEATH_IF_SUPPORTED.\n//\n// Parameters:\n// statement - A statement that a macro such as EXPECT_DEATH would test\n// for program termination. This macro has to make sure this\n// statement is compiled but not executed, to ensure that\n// EXPECT_DEATH_IF_SUPPORTED compiles with a certain\n// parameter if and only if EXPECT_DEATH compiles with it.\n// regex - A regex that a macro such as EXPECT_DEATH would use to test\n// the output of statement. This parameter has to be\n// compiled but not evaluated by this macro, to ensure that\n// this macro only accepts expressions that a macro such as\n// EXPECT_DEATH would accept.\n// terminator - Must be an empty statement for EXPECT_DEATH_IF_SUPPORTED\n// and a return statement for ASSERT_DEATH_IF_SUPPORTED.\n// This ensures that ASSERT_DEATH_IF_SUPPORTED will not\n// compile inside functions where ASSERT_DEATH doesn't\n// compile.\n//\n// The branch that has an always false condition is used to ensure that\n// statement and regex are compiled (and thus syntactically correct) but\n// never executed. The unreachable code macro protects the terminator\n// statement from generating an 'unreachable code' warning in case\n// statement unconditionally returns or throws. The Message constructor at\n// the end allows the syntax of streaming additional messages into the\n// macro, for compilational compatibility with EXPECT_DEATH/ASSERT_DEATH.\n# define GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, terminator) \\\n GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n if (::testing::internal::AlwaysTrue()) { \\\n GTEST_LOG_(WARNING) \\\n << \"Death tests are not supported on this platform.\\n\" \\\n << \"Statement '\" #statement \"' cannot be verified.\"; \\\n } else if (::testing::internal::AlwaysFalse()) { \\\n ::testing::internal::RE::PartialMatch(\".*\", (regex)); \\\n GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n terminator; \\\n } else \\\n ::testing::Message()\n\n// EXPECT_DEATH_IF_SUPPORTED(statement, regex) and\n// ASSERT_DEATH_IF_SUPPORTED(statement, regex) expand to real death tests if\n// death tests are supported; otherwise they just issue a warning. 
This is\n// useful when you are combining death test assertions with normal test\n// assertions in one test.\n#if GTEST_HAS_DEATH_TEST\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n#else\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, )\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, return)\n#endif\n\n} // namespace testing\n\n#endif // GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n", "meta": {"doctype": "codebase", "relative_path": "/contrib/gtest/include/gtest/gtest-death-test.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 21, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 0, "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n// * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n// * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n", "meta": {"hash_id": "fa2481c972f8fad58ae5c7b4d0de7e235fa2dae176c8fb7e8538af44f7958f4c"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 1, "content": "// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n", "meta": {"hash_id": "afa17bcf8d37fb9aaf4a2dc8fed7487226e40dce270804f0321566ca06a3e70f"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 2, "content": "//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the public API for death tests. It is\n// #included by gtest.h so a user doesn't need to include this\n// directly.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n#define GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n\n#include \"gtest/internal/gtest-death-test-internal.h\"\n\nnamespace testing {\n\n// This flag controls the style of death tests. 
Valid values are \"threadsafe\",\n// meaning that the death test child process will re-execute the test binary\n// from the start, running only a single death test, or \"fast\",\n// meaning that the child process will execute the test logic immediately\n// after forking.\nGTEST_DECLARE_string_(death_test_style);\n\n#if GTEST_HAS_DEATH_TEST\n\nnamespace internal {\n\n", "meta": {"hash_id": "63c2ea58f8032f3482f1ea51a33216aa58871210c005c97cb7b7dd19393eba71"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 3, "content": "// Returns a Boolean value indicating whether the caller is currently\n// executing in the context of the death test child process. Tools such as\n// Valgrind heap checkers may need this to modify their behavior in death\n// tests. IMPORTANT: This is an internal utility. Using it may break the\n// implementation of death tests. User code MUST NOT use it.\nGTEST_API_ bool InDeathTestChild();\n\n} // namespace internal\n\n// The following macros are useful for writing death tests.\n\n// Here's what happens when an ASSERT_DEATH* or EXPECT_DEATH* is\n// executed:\n//\n// 1. It generates a warning if there is more than one active\n// thread. This is because it's safe to fork() or clone() only\n// when there is a single thread.\n//\n// 2. The parent process clone()s a sub-process and runs the death\n// test in it; the sub-process exits with code 0 at the end of the\n// death test, if it hasn't exited already.\n//\n", "meta": {"hash_id": "8faafeed3d01563e90b5c455dca66476f207d5e4833040c8cbb8dee710ef628b"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 4, "content": "// 3. The parent process waits for the sub-process to terminate.\n//\n// 4. The parent process checks the exit code and error message of\n// the sub-process.\n//\n// Examples:\n//\n// ASSERT_DEATH(server.SendMessage(56, \"Hello\"), \"Invalid port number\");\n// for (int i = 0; i < 5; i++) {\n// EXPECT_DEATH(server.ProcessRequest(i),\n// \"Invalid request .* in ProcessRequest()\")\n// << \"Failed to die on request \" << i;\n// }\n//\n// ASSERT_EXIT(server.ExitNow(), ::testing::ExitedWithCode(0), \"Exiting\");\n//\n// bool KilledBySIGHUP(int exit_code) {\n// return WIFSIGNALED(exit_code) && WTERMSIG(exit_code) == SIGHUP;\n// }\n//\n// ASSERT_EXIT(client.HangUpServer(), KilledBySIGHUP, \"Hanging up!\");\n//\n// The final parameter to each of these macros is a matcher applied to any data\n", "meta": {"hash_id": "b9270f2c6e305d677bef6bbcbb29b67f16336bbc5555990793c6f8b0ad070edb"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 5, "content": "// the sub-process wrote to stderr. For compatibility with existing tests, a\n// bare string is interpreted as a regular expression matcher.\n//\n// On the regular expressions used in death tests:\n//\n// GOOGLETEST_CM0005 DO NOT DELETE\n// On POSIX-compliant systems (*nix), we use the <regex.h> library,\n// which uses the POSIX extended regex syntax.\n//\n// On other platforms (e.g. Windows or Mac), we only support a simple regex\n// syntax implemented as part of Google Test. This limited\n// implementation should be enough most of the time when writing\n// death tests; though it lacks many features you can find in PCRE\n// or POSIX extended regex syntax. 
For example, we don't support\n", "meta": {"hash_id": "a52d5c4237dc69d0f5d2ce610bbc61529725038fb6c9ac6b47342a62f1c6c819"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 6, "content": "// union (\"x|y\"), grouping (\"(xy)\"), brackets (\"[xy]\"), and\n// repetition count (\"x{5,7}\"), among others.\n//\n// Below is the syntax that we do support. We chose it to be a\n// subset of both PCRE and POSIX extended regex, so it's easy to\n// learn wherever you come from. In the following: 'A' denotes a\n// literal character, period (.), or a single \\\\ escape sequence;\n// 'x' and 'y' denote regular expressions; 'm' and 'n' are for\n", "meta": {"hash_id": "d9a12f8cceac4a0d9004520eb1f31b79c4f6dd08f4dc87efbe2aea0b67f2c017"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 7, "content": "// natural numbers.\n//\n// c matches any literal character c\n// \\\\d matches any decimal digit\n// \\\\D matches any character that's not a decimal digit\n// \\\\f matches \\f\n// \\\\n matches \\n\n// \\\\r matches \\r\n// \\\\s matches any ASCII whitespace, including \\n\n// \\\\S matches any character that's not a whitespace\n// \\\\t matches \\t\n// \\\\v matches \\v\n// \\\\w matches any letter, _, or decimal digit\n// \\\\W matches any character that \\\\w doesn't match\n// \\\\c matches any literal character c, which must be a punctuation\n// . matches any single character except \\n\n", "meta": {"hash_id": "3b4e8c3333a501e0720b06437caf5d6439e94a195a5b4dae7e3b108118cd0489"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 8, "content": "// A? matches 0 or 1 occurrences of A\n// A* matches 0 or many occurrences of A\n// A+ matches 1 or many occurrences of A\n// ^ matches the beginning of a string (not that of each line)\n// $ matches the end of a string (not that of each line)\n// xy matches x followed by y\n//\n// If you accidentally use PCRE or POSIX extended regex features\n// not implemented by us, you will get a run-time failure. In that\n", "meta": {"hash_id": "a3aea3e320800377af9d24bec495f5fc137995b8e1f734110db028ce11ba5bfe"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 9, "content": "// case, please try to rewrite your regular expression within the\n// above syntax.\n//\n// This implementation is *not* meant to be as highly tuned or robust\n// as a compiled regex library, but should perform well enough for a\n// death test, which already incurs significant overhead by launching\n// a child process.\n//\n// Known caveats:\n//\n// A \"threadsafe\" style death test obtains the path to the test\n// program from argv[0] and re-executes it in the sub-process. For\n// simplicity, the current implementation doesn't search the PATH\n// when launching the sub-process. This means that the user must\n// invoke the test program via a path that contains at least one\n// path separator (e.g. path/to/foo_test and\n// /absolute/path/to/bar_test are fine, but foo_test is not). 
This\n// is rarely a problem as people usually don't put the test binary\n// directory in PATH.\n//\n\n", "meta": {"hash_id": "88b9a5dcf47b4538e789a5847d0f3e4cf0452876acc5be15a49630b556152bff"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 10, "content": "// Asserts that a given `statement` causes the program to exit, with an\n// integer exit status that satisfies `predicate`, and emitting error output\n// that matches `matcher`.\n# define ASSERT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_FATAL_FAILURE_)\n\n// Like `ASSERT_EXIT`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_EXIT(statement, predicate, matcher) \\\n GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_NONFATAL_FAILURE_)\n\n", "meta": {"hash_id": "41aadf4a7c3ae07804a5a467688923c0563c5bf8334d844238baee8d4cd8df2f"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 11, "content": "// Asserts that a given `statement` causes the program to exit, either by\n// explicitly exiting with a nonzero exit code or being killed by a\n// signal, and emitting error output that matches `matcher`.\n# define ASSERT_DEATH(statement, matcher) \\\n ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Like `ASSERT_DEATH`, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_DEATH(statement, matcher) \\\n EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher)\n\n// Two predicate classes that can be used in {ASSERT,EXPECT}_EXIT*:\n\n", "meta": {"hash_id": "138d3bb5a0fc59b9f7f8672be7479839902f74e372e46f05ff1640afb1fa3089"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 12, "content": "// Tests that an exit code describes a normal exit with a given exit code.\nclass GTEST_API_ ExitedWithCode {\n public:\n explicit ExitedWithCode(int exit_code);\n ExitedWithCode(const ExitedWithCode&) = default;\n void operator=(const ExitedWithCode& other) = delete;\n bool operator()(int exit_status) const;\n private:\n const int exit_code_;\n};\n\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n// Tests that an exit code describes an exit due to termination by a\n// given signal.\n// GOOGLETEST_CM0006 DO NOT DELETE\nclass GTEST_API_ KilledBySignal {\n public:\n explicit KilledBySignal(int signum);\n bool operator()(int exit_status) const;\n private:\n const int signum_;\n};\n# endif // !GTEST_OS_WINDOWS\n\n", "meta": {"hash_id": "7ba168e83774cabf4ee9ff86d68945e3bf1f74dc84f3ca66b3fb101d8652431b"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 13, "content": "// EXPECT_DEBUG_DEATH asserts that the given statements die in debug mode.\n// The death testing framework causes this to have interesting semantics,\n// since the sideeffects of the call are only visible in opt mode, and not\n// in debug mode.\n//\n// In practice, this can be used to test functions that utilize the\n// LOG(DFATAL) macro using the following style:\n//\n// int DieInDebugOr12(int* sideeffect) {\n// if (sideeffect) {\n// *sideeffect = 12;\n// }\n", "meta": {"hash_id": "d9a2f73d5006016ff9c037bcc326ee5f8420fb1ad140d1c1afb16c0943bda070"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 14, "content": "// LOG(DFATAL) << \"death\";\n// return 12;\n// }\n//\n// TEST(TestSuite, TestDieOr12WorksInDgbAndOpt) {\n// int 
sideeffect = 0;\n// // Only asserts in dbg.\n// EXPECT_DEBUG_DEATH(DieInDebugOr12(&sideeffect), \"death\");\n//\n// #ifdef NDEBUG\n// // opt-mode has sideeffect visible.\n// EXPECT_EQ(12, sideeffect);\n// #else\n// // dbg-mode no visible sideeffect.\n// EXPECT_EQ(0, sideeffect);\n// #endif\n// }\n//\n// This will assert that DieInDebugReturn12InOpt() crashes in debug\n// mode, usually due to a DCHECK or LOG(DFATAL), but returns the\n// appropriate fallback value (12 in this case) in opt mode. If you\n// need to test that a function has appropriate side-effects in opt\n// mode, include assertions against the side-effects. A general\n// pattern for this is:\n//\n// EXPECT_DEBUG_DEATH({\n// // Side-effects here will have an effect after this statement in\n// // opt mode, but none in debug mode.\n// EXPECT_EQ(12, DieInDebugOr12(&sideeffect));\n// }, \"death\");\n//\n# ifdef NDEBUG\n\n", "meta": {"hash_id": "5a133c53c5d7a6eaaefbf4352e0843ae98a4aa0251e57f59c011b1fb6e968ae8"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 15, "content": "# define EXPECT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# else\n\n# define EXPECT_DEBUG_DEATH(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n\n# define ASSERT_DEBUG_DEATH(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n\n# endif // NDEBUG for EXPECT_DEBUG_DEATH\n#endif // GTEST_HAS_DEATH_TEST\n\n", "meta": {"hash_id": "d9c256422a5183912999c081ff0fb308a7f7a3bca4fc86cb214a99f2bd16756a"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 16, "content": "// This macro is used for implementing macros such as\n// EXPECT_DEATH_IF_SUPPORTED and ASSERT_DEATH_IF_SUPPORTED on systems where\n// death tests are not supported. Those macros must compile on such systems\n// if and only if EXPECT_DEATH and ASSERT_DEATH compile with the same parameters\n// on systems that support death tests. This allows one to write such a macro on\n// a system that does not support death tests and be sure that it will compile\n// on a death-test supporting system. It is exposed publicly so that systems\n// that have death-tests with stricter requirements than GTEST_HAS_DEATH_TEST\n", "meta": {"hash_id": "f5642735c8d82b78bd23e4fa94190014ea4444b22b8f9cf3af6b0d17040e7992"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 17, "content": "// can write their own equivalent of EXPECT_DEATH_IF_SUPPORTED and\n// ASSERT_DEATH_IF_SUPPORTED.\n//\n// Parameters:\n// statement - A statement that a macro such as EXPECT_DEATH would test\n// for program termination. This macro has to make sure this\n// statement is compiled but not executed, to ensure that\n// EXPECT_DEATH_IF_SUPPORTED compiles with a certain\n// parameter if and only if EXPECT_DEATH compiles with it.\n// regex - A regex that a macro such as EXPECT_DEATH would use to test\n", "meta": {"hash_id": "cc76ee2fa09c6cb8d2187311dbdef1b4dde4b9e68d59ce0149e8021ddf11e76d"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 18, "content": "// the output of statement. 
This parameter has to be\n// compiled but not evaluated by this macro, to ensure that\n// this macro only accepts expressions that a macro such as\n// EXPECT_DEATH would accept.\n// terminator - Must be an empty statement for EXPECT_DEATH_IF_SUPPORTED\n// and a return statement for ASSERT_DEATH_IF_SUPPORTED.\n// This ensures that ASSERT_DEATH_IF_SUPPORTED will not\n// compile inside functions where ASSERT_DEATH doesn't\n// compile.\n//\n// The branch that has an always false condition is used to ensure that\n// statement and regex are compiled (and thus syntactically correct) but\n// never executed. The unreachable code macro protects the terminator\n// statement from generating an 'unreachable code' warning in case\n// statement unconditionally returns or throws. The Message constructor at\n", "meta": {"hash_id": "8607d0e71e69552252a3d7629615998279f551069b1edcd2d033f82948c3b7a4"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 19, "content": "// the end allows the syntax of streaming additional messages into the\n// macro, for compilational compatibility with EXPECT_DEATH/ASSERT_DEATH.\n# define GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, terminator) \\\n GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n if (::testing::internal::AlwaysTrue()) { \\\n GTEST_LOG_(WARNING) \\\n << \"Death tests are not supported on this platform.\\n\" \\\n << \"Statement '\" #statement \"' cannot be verified.\"; \\\n } else if (::testing::internal::AlwaysFalse()) { \\\n ::testing::internal::RE::PartialMatch(\".*\", (regex)); \\\n GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n terminator; \\\n } else \\\n ::testing::Message()\n\n", "meta": {"hash_id": "8e787174aa745ba646fdf35a44c2979941a6699278d01aa68636794183c76b84"}}, {"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 20, "content": "// EXPECT_DEATH_IF_SUPPORTED(statement, regex) and\n// ASSERT_DEATH_IF_SUPPORTED(statement, regex) expand to real death tests if\n// death tests are supported; otherwise they just issue a warning. This is\n// useful when you are combining death test assertions with normal test\n// assertions in one test.\n#if GTEST_HAS_DEATH_TEST\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n EXPECT_DEATH(statement, regex)\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n ASSERT_DEATH(statement, regex)\n#else\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, )\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, return)\n#endif\n\n} // namespace testing\n\n#endif // GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n", "meta": {"hash_id": "a19a8153685c9a33b21051d7236a7a14679fdbabe4ba62c305689dd0faafc50c"}}]}], "golden_chunks": [{"doc_uuid": "e3d478567bc2bb9f55952c093a97847b3dc538cdcdd9d8b74b855857cf238bd1", "index": 9, "content": "// case, please try to rewrite your regular expression within the\n// above syntax.\n//\n// This implementation is *not* meant to be as highly tuned or robust\n// as a compiled regex library, but should perform well enough for a\n// death test, which already incurs significant overhead by launching\n// a child process.\n//\n// Known caveats:\n//\n// A \"threadsafe\" style death test obtains the path to the test\n// program from argv[0] and re-executes it in the sub-process. For\n// simplicity, the current implementation doesn't search the PATH\n// when launching the sub-process. 
This means that the user must\n// invoke the test program via a path that contains at least one\n// path separator (e.g. path/to/foo_test and\n// /absolute/path/to/bar_test are fine, but foo_test is not). This\n// is rarely a problem as people usually don't put the test binary\n// directory in PATH.\n//\n\n", "meta": {"hash_id": "88b9a5dcf47b4538e789a5847d0f3e4cf0452876acc5be15a49630b556152bff"}}], "meta": {"doctype": "codebase", "relative_path": "/contrib/gtest/include/gtest/gtest-death-test.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 21, "source": "codebases/jsonl"}} +{"query": "How do you read a string using WireFormat?", "answer": "To read a string, you can use the ReadString method:\n\nstatic bool ReadString(InputStream& input, std::string* value);\n\nIt reads a string from the provided InputStream and stores it in the value pointer. The method returns true if the reading was successful, false otherwise. The string length is read using ReadVarint64, and the string data is read using ReadAll.", "golden_doc_uuids": ["b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36"], "golden_chunk_uuids": [["b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", 0]], "golden_documents": [{"uuid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", "content": "#pragma once\n\n#include \n#include \n\nnamespace clickhouse {\n\nclass InputStream;\nclass OutputStream;\n\nclass WireFormat {\npublic:\n template \n static bool ReadFixed(InputStream& input, T* value);\n static bool ReadString(InputStream& input, std::string* value);\n static bool SkipString(InputStream& input);\n static bool ReadBytes(InputStream& input, void* buf, size_t len);\n static bool ReadUInt64(InputStream& input, uint64_t* value);\n static bool ReadVarint64(InputStream& output, uint64_t* value);\n\n template \n static void WriteFixed(OutputStream& output, const T& value);\n static void WriteBytes(OutputStream& output, const void* buf, size_t len);\n static void WriteString(OutputStream& output, std::string_view value);\n static void WriteUInt64(OutputStream& output, const uint64_t value);\n static void WriteVarint64(OutputStream& output, uint64_t value);\n\nprivate:\n static bool ReadAll(InputStream& input, void* buf, size_t len);\n static void WriteAll(OutputStream& output, const void* buf, size_t len);\n};\n\ntemplate \ninline bool WireFormat::ReadFixed(InputStream& input, T* value) {\n return ReadAll(input, value, sizeof(T));\n}\n\ninline bool WireFormat::ReadString(InputStream& input, std::string* value) {\n uint64_t len = 0;\n if (ReadVarint64(input, &len)) {\n if (len > 0x00FFFFFFULL) {\n return false;\n }\n value->resize((size_t)len);\n return ReadAll(input, value->data(), (size_t)len);\n }\n\n return false;\n}\n\ninline bool WireFormat::ReadBytes(InputStream& input, void* buf, size_t len) {\n return ReadAll(input, buf, len);\n}\n\ninline bool WireFormat::ReadUInt64(InputStream& input, uint64_t* value) {\n return ReadVarint64(input, value);\n}\n\ntemplate \ninline void WireFormat::WriteFixed(OutputStream& output, const T& value) {\n WriteAll(output, &value, sizeof(T));\n}\n\ninline void WireFormat::WriteBytes(OutputStream& output, const void* buf, size_t len) {\n WriteAll(output, buf, len);\n}\n\ninline void WireFormat::WriteString(OutputStream& output, std::string_view value) {\n WriteVarint64(output, value.size());\n WriteAll(output, value.data(), value.size());\n}\n\ninline void WireFormat::WriteUInt64(OutputStream& output, const uint64_t value) {\n 
WriteVarint64(output, value);\n}\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/base/wire_format.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", "index": 0, "content": "#pragma once\n\n#include \n#include \n\nnamespace clickhouse {\n\nclass InputStream;\nclass OutputStream;\n\nclass WireFormat {\npublic:\n template \n static bool ReadFixed(InputStream& input, T* value);\n static bool ReadString(InputStream& input, std::string* value);\n static bool SkipString(InputStream& input);\n static bool ReadBytes(InputStream& input, void* buf, size_t len);\n static bool ReadUInt64(InputStream& input, uint64_t* value);\n static bool ReadVarint64(InputStream& output, uint64_t* value);\n\n template \n static void WriteFixed(OutputStream& output, const T& value);\n static void WriteBytes(OutputStream& output, const void* buf, size_t len);\n static void WriteString(OutputStream& output, std::string_view value);\n static void WriteUInt64(OutputStream& output, const uint64_t value);\n static void WriteVarint64(OutputStream& output, uint64_t value);\n\n", "meta": {"hash_id": "bf97246315b9bec23cdab5bf50923ddd31a3c60755413603f93faa72a0ca9b40"}}, {"doc_uuid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", "index": 1, "content": "private:\n static bool ReadAll(InputStream& input, void* buf, size_t len);\n static void WriteAll(OutputStream& output, const void* buf, size_t len);\n};\n\ntemplate \ninline bool WireFormat::ReadFixed(InputStream& input, T* value) {\n return ReadAll(input, value, sizeof(T));\n}\n\ninline bool WireFormat::ReadString(InputStream& input, std::string* value) {\n uint64_t len = 0;\n if (ReadVarint64(input, &len)) {\n if (len > 0x00FFFFFFULL) {\n return false;\n }\n value->resize((size_t)len);\n return ReadAll(input, value->data(), (size_t)len);\n }\n\n", "meta": {"hash_id": "c023c91ebc03f7382de7785af0dba6fbfb8953c9fc3c18133d7f3172bca6d432"}}, {"doc_uuid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", "index": 2, "content": " return false;\n}\n\ninline bool WireFormat::ReadBytes(InputStream& input, void* buf, size_t len) {\n return ReadAll(input, buf, len);\n}\n\ninline bool WireFormat::ReadUInt64(InputStream& input, uint64_t* value) {\n return ReadVarint64(input, value);\n}\n\ntemplate \ninline void WireFormat::WriteFixed(OutputStream& output, const T& value) {\n WriteAll(output, &value, sizeof(T));\n}\n\ninline void WireFormat::WriteBytes(OutputStream& output, const void* buf, size_t len) {\n WriteAll(output, buf, len);\n}\n\ninline void WireFormat::WriteString(OutputStream& output, std::string_view value) {\n WriteVarint64(output, value.size());\n WriteAll(output, value.data(), value.size());\n}\n\ninline void WireFormat::WriteUInt64(OutputStream& output, const uint64_t value) {\n WriteVarint64(output, value);\n}\n\n}\n", "meta": {"hash_id": "fcbaa63842d50e515f248080fa9244570dc1801b366c143dd6d6e6b843148171"}}]}], "golden_chunks": [{"doc_uuid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", "index": 0, "content": "#pragma once\n\n#include \n#include \n\nnamespace clickhouse {\n\nclass InputStream;\nclass OutputStream;\n\nclass WireFormat {\npublic:\n template \n static bool ReadFixed(InputStream& input, T* value);\n static bool ReadString(InputStream& input, std::string* value);\n static bool SkipString(InputStream& input);\n static bool ReadBytes(InputStream& input, 
void* buf, size_t len);\n static bool ReadUInt64(InputStream& input, uint64_t* value);\n static bool ReadVarint64(InputStream& output, uint64_t* value);\n\n template \n static void WriteFixed(OutputStream& output, const T& value);\n static void WriteBytes(OutputStream& output, const void* buf, size_t len);\n static void WriteString(OutputStream& output, std::string_view value);\n static void WriteUInt64(OutputStream& output, const uint64_t value);\n static void WriteVarint64(OutputStream& output, uint64_t value);\n\n", "meta": {"hash_id": "bf97246315b9bec23cdab5bf50923ddd31a3c60755413603f93faa72a0ca9b40"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/base/wire_format.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How do you read a 64-bit unsigned integer using WireFormat?", "answer": "To read a 64-bit unsigned integer, you can use the ReadUInt64 method:\n\nstatic bool ReadUInt64(InputStream& input, uint64_t* value);\n\nIt reads a 64-bit unsigned integer from the provided InputStream using ReadVarint64 and stores it in the value pointer. The method returns true if the reading was successful, false otherwise.", "golden_doc_uuids": ["b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36"], "golden_chunk_uuids": [["b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", 0]], "golden_documents": [{"uuid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", "content": "#pragma once\n\n#include \n#include \n\nnamespace clickhouse {\n\nclass InputStream;\nclass OutputStream;\n\nclass WireFormat {\npublic:\n template \n static bool ReadFixed(InputStream& input, T* value);\n static bool ReadString(InputStream& input, std::string* value);\n static bool SkipString(InputStream& input);\n static bool ReadBytes(InputStream& input, void* buf, size_t len);\n static bool ReadUInt64(InputStream& input, uint64_t* value);\n static bool ReadVarint64(InputStream& output, uint64_t* value);\n\n template \n static void WriteFixed(OutputStream& output, const T& value);\n static void WriteBytes(OutputStream& output, const void* buf, size_t len);\n static void WriteString(OutputStream& output, std::string_view value);\n static void WriteUInt64(OutputStream& output, const uint64_t value);\n static void WriteVarint64(OutputStream& output, uint64_t value);\n\nprivate:\n static bool ReadAll(InputStream& input, void* buf, size_t len);\n static void WriteAll(OutputStream& output, const void* buf, size_t len);\n};\n\ntemplate \ninline bool WireFormat::ReadFixed(InputStream& input, T* value) {\n return ReadAll(input, value, sizeof(T));\n}\n\ninline bool WireFormat::ReadString(InputStream& input, std::string* value) {\n uint64_t len = 0;\n if (ReadVarint64(input, &len)) {\n if (len > 0x00FFFFFFULL) {\n return false;\n }\n value->resize((size_t)len);\n return ReadAll(input, value->data(), (size_t)len);\n }\n\n return false;\n}\n\ninline bool WireFormat::ReadBytes(InputStream& input, void* buf, size_t len) {\n return ReadAll(input, buf, len);\n}\n\ninline bool WireFormat::ReadUInt64(InputStream& input, uint64_t* value) {\n return ReadVarint64(input, value);\n}\n\ntemplate \ninline void WireFormat::WriteFixed(OutputStream& output, const T& value) {\n WriteAll(output, &value, sizeof(T));\n}\n\ninline void WireFormat::WriteBytes(OutputStream& output, const void* buf, size_t len) {\n WriteAll(output, buf, len);\n}\n\ninline void WireFormat::WriteString(OutputStream& output, std::string_view value) {\n 
WriteVarint64(output, value.size());\n WriteAll(output, value.data(), value.size());\n}\n\ninline void WireFormat::WriteUInt64(OutputStream& output, const uint64_t value) {\n WriteVarint64(output, value);\n}\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/base/wire_format.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", "index": 0, "content": "#pragma once\n\n#include \n#include \n\nnamespace clickhouse {\n\nclass InputStream;\nclass OutputStream;\n\nclass WireFormat {\npublic:\n template \n static bool ReadFixed(InputStream& input, T* value);\n static bool ReadString(InputStream& input, std::string* value);\n static bool SkipString(InputStream& input);\n static bool ReadBytes(InputStream& input, void* buf, size_t len);\n static bool ReadUInt64(InputStream& input, uint64_t* value);\n static bool ReadVarint64(InputStream& output, uint64_t* value);\n\n template \n static void WriteFixed(OutputStream& output, const T& value);\n static void WriteBytes(OutputStream& output, const void* buf, size_t len);\n static void WriteString(OutputStream& output, std::string_view value);\n static void WriteUInt64(OutputStream& output, const uint64_t value);\n static void WriteVarint64(OutputStream& output, uint64_t value);\n\n", "meta": {"hash_id": "bf97246315b9bec23cdab5bf50923ddd31a3c60755413603f93faa72a0ca9b40"}}, {"doc_uuid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", "index": 1, "content": "private:\n static bool ReadAll(InputStream& input, void* buf, size_t len);\n static void WriteAll(OutputStream& output, const void* buf, size_t len);\n};\n\ntemplate \ninline bool WireFormat::ReadFixed(InputStream& input, T* value) {\n return ReadAll(input, value, sizeof(T));\n}\n\ninline bool WireFormat::ReadString(InputStream& input, std::string* value) {\n uint64_t len = 0;\n if (ReadVarint64(input, &len)) {\n if (len > 0x00FFFFFFULL) {\n return false;\n }\n value->resize((size_t)len);\n return ReadAll(input, value->data(), (size_t)len);\n }\n\n", "meta": {"hash_id": "c023c91ebc03f7382de7785af0dba6fbfb8953c9fc3c18133d7f3172bca6d432"}}, {"doc_uuid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", "index": 2, "content": " return false;\n}\n\ninline bool WireFormat::ReadBytes(InputStream& input, void* buf, size_t len) {\n return ReadAll(input, buf, len);\n}\n\ninline bool WireFormat::ReadUInt64(InputStream& input, uint64_t* value) {\n return ReadVarint64(input, value);\n}\n\ntemplate \ninline void WireFormat::WriteFixed(OutputStream& output, const T& value) {\n WriteAll(output, &value, sizeof(T));\n}\n\ninline void WireFormat::WriteBytes(OutputStream& output, const void* buf, size_t len) {\n WriteAll(output, buf, len);\n}\n\ninline void WireFormat::WriteString(OutputStream& output, std::string_view value) {\n WriteVarint64(output, value.size());\n WriteAll(output, value.data(), value.size());\n}\n\ninline void WireFormat::WriteUInt64(OutputStream& output, const uint64_t value) {\n WriteVarint64(output, value);\n}\n\n}\n", "meta": {"hash_id": "fcbaa63842d50e515f248080fa9244570dc1801b366c143dd6d6e6b843148171"}}]}], "golden_chunks": [{"doc_uuid": "b55a4b2aefbbe30b355c360d7f1f24bd114d9699fdd21c2e4eae3f693ab5ef36", "index": 0, "content": "#pragma once\n\n#include \n#include \n\nnamespace clickhouse {\n\nclass InputStream;\nclass OutputStream;\n\nclass WireFormat {\npublic:\n template \n static bool ReadFixed(InputStream& 
input, T* value);\n static bool ReadString(InputStream& input, std::string* value);\n static bool SkipString(InputStream& input);\n static bool ReadBytes(InputStream& input, void* buf, size_t len);\n static bool ReadUInt64(InputStream& input, uint64_t* value);\n static bool ReadVarint64(InputStream& output, uint64_t* value);\n\n template \n static void WriteFixed(OutputStream& output, const T& value);\n static void WriteBytes(OutputStream& output, const void* buf, size_t len);\n static void WriteString(OutputStream& output, std::string_view value);\n static void WriteUInt64(OutputStream& output, const uint64_t value);\n static void WriteVarint64(OutputStream& output, uint64_t value);\n\n", "meta": {"hash_id": "bf97246315b9bec23cdab5bf50923ddd31a3c60755413603f93faa72a0ca9b40"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/base/wire_format.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the LoadPrefix function in the Column class?", "answer": "According to the code, the LoadPrefix function \"does nothing by default\" and always returns true. This is evident from the comment and the function body:\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nThe function takes an InputStream pointer and a size_t parameter, but doesn't use them. It seems to be a placeholder function that can be overridden in derived classes if needed.", "golden_doc_uuids": ["f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d"], "golden_chunk_uuids": [["f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", 0]], "golden_documents": [{"uuid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", "content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/column.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", "index": 0, "content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n", "meta": {"hash_id": "8a50b3a946061abd97674d082e334b35b45c57360c6a753757fad02030e60a8e"}}]}], "golden_chunks": [{"doc_uuid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", "index": 0, "content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid 
Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n", "meta": {"hash_id": "8a50b3a946061abd97674d082e334b35b45c57360c6a753757fad02030e60a8e"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/column.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the SavePrefix function in the Column class?", "answer": "Similar to LoadPrefix, the SavePrefix function in the Column class \"does nothing by default\". It takes an OutputStream pointer as a parameter but doesn't use it. The function has an empty body:\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\nThis function is likely a placeholder that can be overridden in derived classes to perform any necessary prefix-related operations before saving the column data.", "golden_doc_uuids": ["f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d"], "golden_chunk_uuids": [["f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", 0]], "golden_documents": [{"uuid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", "content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/column.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", "index": 0, "content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n", "meta": {"hash_id": "8a50b3a946061abd97674d082e334b35b45c57360c6a753757fad02030e60a8e"}}]}], "golden_chunks": [{"doc_uuid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", "index": 0, "content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n", "meta": {"hash_id": "8a50b3a946061abd97674d082e334b35b45c57360c6a753757fad02030e60a8e"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/column.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "How does the Save function 
in the Column class work?", "answer": "The Save function in the Column class is responsible for saving the column data to an output stream. It takes an OutputStream pointer as a parameter. The function first calls SavePrefix and then SaveBody:\n\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\nThe actual implementation of SaveBody is not shown in the provided code snippet, but it is expected to write the column data to the output stream.", "golden_doc_uuids": ["f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d"], "golden_chunk_uuids": [["f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", 0]], "golden_documents": [{"uuid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", "content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/column.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", "index": 0, "content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n", "meta": {"hash_id": "8a50b3a946061abd97674d082e334b35b45c57360c6a753757fad02030e60a8e"}}]}], "golden_chunks": [{"doc_uuid": "f08b70253a31ba96a0e3e873e3f53393786ceb5fb150ea3878551b32ccf3914d", "index": 0, "content": "#include \"column.h\"\n\nnamespace clickhouse {\n\nbool Column::LoadPrefix(InputStream*, size_t) {\n /// does nothing by default\n return true;\n}\n\nbool Column::Load(InputStream* input, size_t rows) {\n return LoadPrefix(input, rows) && LoadBody(input, rows);\n}\n\nvoid Column::SavePrefix(OutputStream*) {\n /// does nothing by default\n}\n\n/// Saves column data to output stream.\nvoid Column::Save(OutputStream* output) {\n SavePrefix(output);\n SaveBody(output);\n}\n\n}\n", "meta": {"hash_id": "8a50b3a946061abd97674d082e334b35b45c57360c6a753757fad02030e60a8e"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/column.cpp", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "How does the ColumnLowCardinality class handle null values?", "answer": "As mentioned in the comment:\n/*\n * LC column contains an \"invisible\" default item at the beginning of the collection. [default, ...]\n * If the nested type is Nullable, it contains a null-item at the beginning and a default item at the second position. [null, default, ...]\n * Null map is not serialized in LC columns. 
Instead, nulls are tracked by having an index of 0.\n * */\nIf the nested type is Nullable, the ColumnLowCardinality class stores a null item at the beginning of the dictionary column, followed by a default item. Null values are represented by an index of 0 in the index column.", "golden_doc_uuids": ["087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a"], "golden_chunk_uuids": [["087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", 1]], "golden_documents": [{"uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "content": "#pragma once\n\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"nullable.h\"\n\n#include \n#include \n#include \n#include \n\nnamespace clickhouse {\n\ntemplate \nclass ColumnLowCardinalityT;\n\nnamespace details {\n\n/** LowCardinalityHashKey used as key in unique items hashmap to abstract away key value\n * (type of which depends on dictionary column) and to reduce likelehood of collisions.\n *\n * In order to dramatically reduce collision rate, we use 2 different hashes from 2 different hash functions.\n * First hash is used in hashtable (to calculate item position).\n * Second one is used as part of key value and accessed via `operator==()` upon collision resolution/detection.\n */\nusing LowCardinalityHashKey = std::pair;\n\nstruct LowCardinalityHashKeyHash {\n inline std::size_t operator()(const LowCardinalityHashKey &hash_key) const noexcept {\n return hash_key.first;\n }\n};\n\n}\n\n/*\n * LC column contains an \"invisible\" default item at the beginning of the collection. [default, ...]\n * If the nested type is Nullable, it contains a null-item at the beginning and a default item at the second position. [null, default, ...]\n * Null map is not serialized in LC columns. 
Instead, nulls are tracked by having an index of 0.\n * */\nclass ColumnLowCardinality : public Column {\npublic:\n using UniqueItems = std::unordered_map;\n\n template \n friend class ColumnLowCardinalityT;\n\nprivate:\n // IMPLEMENTATION NOTE: ColumnLowCardinalityT takes reference to underlying dictionary column object,\n // so make sure to NOT change address of the dictionary object (with reset(), swap()) or with anything else.\n ColumnRef dictionary_column_;\n ColumnRef index_column_;\n UniqueItems unique_items_map_;\n\npublic:\n ColumnLowCardinality(ColumnLowCardinality&& col) = default;\n // c-tor makes a deep copy of the dictionary_column.\n explicit ColumnLowCardinality(ColumnRef dictionary_column);\n explicit ColumnLowCardinality(std::shared_ptr dictionary_column);\n\n template \n explicit ColumnLowCardinality(std::shared_ptr> dictionary_column)\n : ColumnLowCardinality(dictionary_column->template As())\n {}\n\n ~ColumnLowCardinality();\n\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends another LowCardinality column to the end of this one, updating dictionary.\n void Append(ColumnRef /*column*/) override;\n\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data.\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of current column, with compacted dictionary\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n ItemView GetItem(size_t index) const override;\n\n size_t GetDictionarySize() const;\n TypeRef GetNestedType() const;\n\nprotected:\n std::uint64_t getDictionaryIndex(std::uint64_t item_index) const;\n void appendIndex(std::uint64_t item_index);\n void removeLastIndex();\n ColumnRef GetDictionary();\n\n void AppendUnsafe(const ItemView &);\n\nprivate:\n void Setup(ColumnRef dictionary_column);\n void AppendNullItem();\n void AppendDefaultItem();\n\npublic:\n static details::LowCardinalityHashKey computeHashKey(const ItemView &);\n};\n\n/** Type-aware wrapper that provides simple convenience interface for accessing/appending individual items.\n */\ntemplate \nclass ColumnLowCardinalityT : public ColumnLowCardinality {\n\n DictionaryColumnType& typed_dictionary_;\n const Type::Code type_;\n\npublic:\n using WrappedColumnType = DictionaryColumnType;\n // Type this column takes as argument of Append and returns with At() and operator[]\n using ValueType = typename DictionaryColumnType::ValueType;\n\n explicit ColumnLowCardinalityT(ColumnLowCardinality&& col)\n : ColumnLowCardinality(std::move(col))\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {\n }\n\n template \n explicit ColumnLowCardinalityT(Args &&... 
args)\n : ColumnLowCardinalityT(std::make_shared(std::forward(args)...))\n {}\n\n // Create LC column from existing T-column, making a deep copy of all contents.\n explicit ColumnLowCardinalityT(std::shared_ptr dictionary_col)\n : ColumnLowCardinality(dictionary_col)\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {}\n\n /// Extended interface to simplify reading/adding individual items.\n\n /// Returns element at given row number.\n inline ValueType At(size_t n) const {\n return typed_dictionary_.At(getDictionaryIndex(n));\n }\n\n /// Returns element at given row number.\n inline ValueType operator [] (size_t n) const {\n return typed_dictionary_[getDictionaryIndex(n)];\n }\n\n // so the non-virtual Append below doesn't shadow Append() from base class when compiled with older compilers.\n using ColumnLowCardinality::Append;\n\n inline void Append(const ValueType & value) {\n if constexpr (IsNullable) {\n if (value.has_value()) {\n AppendUnsafe(ItemView{type_, *value});\n } else {\n AppendUnsafe(ItemView{});\n }\n } else {\n AppendUnsafe(ItemView{type_, value});\n }\n }\n\n template \n inline void AppendMany(const T& container) {\n for (const auto & item : container) {\n Append(item);\n }\n }\n\n /** Create a ColumnLowCardinalityT from a ColumnLowCardinality, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnLowCardinality&& col) {\n return std::make_shared>(std::move(col));\n }\n\n static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnLowCardinality::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnLowCardinality::CloneEmpty()); }\n\nprivate:\n\n template \n static auto GetTypeCode(T& column) {\n if constexpr (IsNullable) {\n return GetTypeCode(*column.Nested()->template AsStrict());\n } else {\n return column.Type()->GetCode();\n }\n }\n};\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/lowcardinality.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "index": 0, "content": "#pragma once\n\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"nullable.h\"\n\n#include \n#include \n#include \n#include \n\nnamespace clickhouse {\n\ntemplate \nclass ColumnLowCardinalityT;\n\nnamespace details {\n\n/** LowCardinalityHashKey used as key in unique items hashmap to abstract away key value\n * (type of which depends on dictionary column) and to reduce likelehood of collisions.\n *\n * In order to dramatically reduce collision rate, we use 2 different hashes from 2 different hash functions.\n * First hash is used in hashtable (to calculate item position).\n * Second one is used as part of key value and accessed via `operator==()` upon collision resolution/detection.\n */\nusing LowCardinalityHashKey = std::pair;\n\n", "meta": 
{"hash_id": "ea61a56f5913c43b9ab9b8e9d391b2e31d3070e90d504362df1007f258cb4e2a"}}, {"doc_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "index": 1, "content": "struct LowCardinalityHashKeyHash {\n inline std::size_t operator()(const LowCardinalityHashKey &hash_key) const noexcept {\n return hash_key.first;\n }\n};\n\n}\n\n/*\n * LC column contains an \"invisible\" default item at the beginning of the collection. [default, ...]\n * If the nested type is Nullable, it contains a null-item at the beginning and a default item at the second position. [null, default, ...]\n * Null map is not serialized in LC columns. Instead, nulls are tracked by having an index of 0.\n * */\nclass ColumnLowCardinality : public Column {\npublic:\n using UniqueItems = std::unordered_map;\n\n", "meta": {"hash_id": "7b8507227202e6f0dcea8d11d9672b54c60f0297a0a60da89d0086cbb63eb209"}}, {"doc_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "index": 2, "content": " template \n friend class ColumnLowCardinalityT;\n\nprivate:\n // IMPLEMENTATION NOTE: ColumnLowCardinalityT takes reference to underlying dictionary column object,\n // so make sure to NOT change address of the dictionary object (with reset(), swap()) or with anything else.\n ColumnRef dictionary_column_;\n ColumnRef index_column_;\n UniqueItems unique_items_map_;\n\npublic:\n ColumnLowCardinality(ColumnLowCardinality&& col) = default;\n // c-tor makes a deep copy of the dictionary_column.\n explicit ColumnLowCardinality(ColumnRef dictionary_column);\n explicit ColumnLowCardinality(std::shared_ptr dictionary_column);\n\n template \n explicit ColumnLowCardinality(std::shared_ptr> dictionary_column)\n : ColumnLowCardinality(dictionary_column->template As())\n {}\n\n", "meta": {"hash_id": "23a6409f2e20a765d7b4b759abc005bc8325db180c30190b57ba29a276629631"}}, {"doc_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "index": 3, "content": " ~ColumnLowCardinality();\n\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends another LowCardinality column to the end of this one, updating dictionary.\n void Append(ColumnRef /*column*/) override;\n\n bool LoadPrefix(InputStream* input, size_t rows) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column prefix to output stream.\n void SavePrefix(OutputStream* output) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data.\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n", "meta": {"hash_id": "4e9ed046d11bce0338741c8c1412e2454d8f1195b38707ff00a4500ba152791b"}}, {"doc_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "index": 4, "content": " /// Makes slice of current column, with compacted dictionary\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n ItemView GetItem(size_t index) const override;\n\n size_t GetDictionarySize() const;\n TypeRef GetNestedType() const;\n\nprotected:\n std::uint64_t getDictionaryIndex(std::uint64_t item_index) const;\n void appendIndex(std::uint64_t item_index);\n void removeLastIndex();\n ColumnRef GetDictionary();\n\n void AppendUnsafe(const ItemView &);\n\nprivate:\n void Setup(ColumnRef dictionary_column);\n void 
AppendNullItem();\n void AppendDefaultItem();\n\npublic:\n static details::LowCardinalityHashKey computeHashKey(const ItemView &);\n};\n\n/** Type-aware wrapper that provides simple convenience interface for accessing/appending individual items.\n */\ntemplate \nclass ColumnLowCardinalityT : public ColumnLowCardinality {\n\n", "meta": {"hash_id": "3dc2e3f286fe7255c3b43e8eeaa0a3d37b1600e4a614291204b4393b7bbb1190"}}, {"doc_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "index": 5, "content": " DictionaryColumnType& typed_dictionary_;\n const Type::Code type_;\n\npublic:\n using WrappedColumnType = DictionaryColumnType;\n // Type this column takes as argument of Append and returns with At() and operator[]\n using ValueType = typename DictionaryColumnType::ValueType;\n\n explicit ColumnLowCardinalityT(ColumnLowCardinality&& col)\n : ColumnLowCardinality(std::move(col))\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {\n }\n\n template \n explicit ColumnLowCardinalityT(Args &&... args)\n : ColumnLowCardinalityT(std::make_shared(std::forward(args)...))\n {}\n\n", "meta": {"hash_id": "e2386d53914a92c4454125497ab0c5d00c741ec7cc3ce14ea531874f01d577fa"}}, {"doc_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "index": 6, "content": " // Create LC column from existing T-column, making a deep copy of all contents.\n explicit ColumnLowCardinalityT(std::shared_ptr dictionary_col)\n : ColumnLowCardinality(dictionary_col)\n , typed_dictionary_(dynamic_cast(*GetDictionary()))\n , type_(GetTypeCode(typed_dictionary_))\n {}\n\n /// Extended interface to simplify reading/adding individual items.\n\n /// Returns element at given row number.\n inline ValueType At(size_t n) const {\n return typed_dictionary_.At(getDictionaryIndex(n));\n }\n\n /// Returns element at given row number.\n inline ValueType operator [] (size_t n) const {\n return typed_dictionary_[getDictionaryIndex(n)];\n }\n\n // so the non-virtual Append below doesn't shadow Append() from base class when compiled with older compilers.\n using ColumnLowCardinality::Append;\n\n", "meta": {"hash_id": "6aaf49ac725dca77b5127eed1d9b181432cc5a40083b6c207adf3e29327683df"}}, {"doc_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "index": 7, "content": " inline void Append(const ValueType & value) {\n if constexpr (IsNullable) {\n if (value.has_value()) {\n AppendUnsafe(ItemView{type_, *value});\n } else {\n AppendUnsafe(ItemView{});\n }\n } else {\n AppendUnsafe(ItemView{type_, value});\n }\n }\n\n template \n inline void AppendMany(const T& container) {\n for (const auto & item : container) {\n Append(item);\n }\n }\n\n", "meta": {"hash_id": "7ce8acb9d7a2fedb8d1aee097b8fc855c3530613457524d4cb93a2b6b63ad5fa"}}, {"doc_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "index": 8, "content": " /** Create a ColumnLowCardinalityT from a ColumnLowCardinality, without copying data and offsets, but by\n * 'stealing' those from `col`.\n *\n * Ownership of column internals is transferred to returned object, original (argument) object\n * MUST NOT BE USED IN ANY WAY, it is only safe to dispose it.\n *\n * Throws an exception if `col` is of wrong type, it is safe to use original col in this case.\n * This is a static method to make such conversion verbose.\n */\n static auto Wrap(ColumnLowCardinality&& col) {\n return std::make_shared>(std::move(col));\n }\n\n", "meta": {"hash_id": 
"1d9465ace2062caa9a168ba0db3fa6526086c03253e61b922c555c5a47fcfa5e"}}, {"doc_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "index": 9, "content": " static auto Wrap(Column&& col) { return Wrap(std::move(dynamic_cast(col))); }\n\n // Helper to simplify integration with other APIs\n static auto Wrap(ColumnRef&& col) { return Wrap(std::move(*col->AsStrict())); }\n\n ColumnRef Slice(size_t begin, size_t size) const override {\n return Wrap(ColumnLowCardinality::Slice(begin, size));\n }\n\n ColumnRef CloneEmpty() const override { return Wrap(ColumnLowCardinality::CloneEmpty()); }\n\nprivate:\n\n template \n static auto GetTypeCode(T& column) {\n if constexpr (IsNullable) {\n return GetTypeCode(*column.Nested()->template AsStrict());\n } else {\n return column.Type()->GetCode();\n }\n }\n};\n\n}\n", "meta": {"hash_id": "5200549fa2077a9c79a2b7e280f5b61c18c7add23b1fd9a1efdc3a4c103f4231"}}]}], "golden_chunks": [{"doc_uuid": "087c7915a5df0e768bca07ed728ccdebfa8bfd6d5eeb03a815ad219ead305e6a", "index": 1, "content": "struct LowCardinalityHashKeyHash {\n inline std::size_t operator()(const LowCardinalityHashKey &hash_key) const noexcept {\n return hash_key.first;\n }\n};\n\n}\n\n/*\n * LC column contains an \"invisible\" default item at the beginning of the collection. [default, ...]\n * If the nested type is Nullable, it contains a null-item at the beginning and a default item at the second position. [null, default, ...]\n * Null map is not serialized in LC columns. Instead, nulls are tracked by having an index of 0.\n * */\nclass ColumnLowCardinality : public Column {\npublic:\n using UniqueItems = std::unordered_map;\n\n", "meta": {"hash_id": "7b8507227202e6f0dcea8d11d9672b54c60f0297a0a60da89d0086cbb63eb209"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/lowcardinality.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "What geometric data types are supported by the code?", "answer": "The code defines several type aliases using the ColumnGeo class template:\n- ColumnPoint: Represents a Point column, which is a ColumnGeo of ColumnTupleT with Type::Code::Point.\n- ColumnRing: Represents a Ring column, which is a ColumnGeo of ColumnArrayT with Type::Code::Ring.\n- ColumnPolygon: Represents a Polygon column, which is a ColumnGeo of ColumnArrayT with Type::Code::Polygon.\n- ColumnMultiPolygon: Represents a MultiPolygon column, which is a ColumnGeo of ColumnArrayT with Type::Code::MultiPolygon.", "golden_doc_uuids": ["130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6"], "golden_chunk_uuids": [["130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", 2]], "golden_documents": [{"uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate \nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template \n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void 
Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/geo.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 0, "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate \nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template \n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\n", "meta": {"hash_id": "91854448f7b827201efd47f63485a7f9c7938f68f6579a9fc1fcf0db1e9a45f4"}}, {"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 1, "content": "public:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n", "meta": {"hash_id": "7136d0cc76ab9f15bf4c8d363bec9d3d0dd34ececfb4a9c184a8e44e7cc48706"}}, {"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 2, "content": " /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon 
column.\n */\nusing ColumnMultiPolygon = ColumnGeo<ColumnArrayT<ColumnPolygon>, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"hash_id": "15626b152ff8f2d8f7e1f7fd6d11719068a0b020b869e99d0286fbc7eb876575"}}]}], "golden_chunks": [{"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 2, "content": " /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr<NestedColumnType> data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo<ColumnTupleT<ColumnFloat64, ColumnFloat64>, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo<ColumnArrayT<ColumnPoint>, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo<ColumnArrayT<ColumnRing>, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo<ColumnArrayT<ColumnPolygon>, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"hash_id": "15626b152ff8f2d8f7e1f7fd6d11719068a0b020b869e99d0286fbc7eb876575"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/geo.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}}
{"query": "How can you append an element to a ColumnGeo?", "answer": "The ColumnGeo class provides a template method Append() that allows appending an element to the end of the column. The method takes a single argument of type ValueType (which is derived from the NestedColumnType) or any type T that is convertible to ValueType. For example:\n\ntemplate <typename T = ValueType>\nvoid Append(const T& value) {\n data_->Append(value);\n}\n", "golden_doc_uuids": ["130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6"], "golden_chunk_uuids": [["130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", 0]], "golden_documents": [{"uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate <typename NestedColumnType, Type::Code type_code>\nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template <typename T = ValueType>\n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr<NestedColumnType> data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo<ColumnTupleT<ColumnFloat64, ColumnFloat64>, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo<ColumnArrayT<ColumnPoint>, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo<ColumnArrayT<ColumnRing>, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo<ColumnArrayT<ColumnPolygon>, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/geo.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 0, "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate <typename NestedColumnType, Type::Code type_code>\nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template <typename T = ValueType>\n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\n", "meta": {"hash_id": "91854448f7b827201efd47f63485a7f9c7938f68f6579a9fc1fcf0db1e9a45f4"}}, {"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 1, "content": "public:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n", "meta": {"hash_id": "7136d0cc76ab9f15bf4c8d363bec9d3d0dd34ececfb4a9c184a8e44e7cc48706"}}, {"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 2, "content": " /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr<NestedColumnType> data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo<ColumnTupleT<ColumnFloat64, ColumnFloat64>, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo<ColumnArrayT<ColumnPoint>, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo<ColumnArrayT<ColumnRing>, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo<ColumnArrayT<ColumnPolygon>, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"hash_id": "15626b152ff8f2d8f7e1f7fd6d11719068a0b020b869e99d0286fbc7eb876575"}}]}], "golden_chunks": [{"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 0, "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate <typename NestedColumnType, Type::Code type_code>\nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template <typename T = ValueType>\n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\n", "meta": {"hash_id": "91854448f7b827201efd47f63485a7f9c7938f68f6579a9fc1fcf0db1e9a45f4"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/geo.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}}
{"query": "How can you access an element in a ColumnGeo?", "answer": "The ColumnGeo class provides two methods for accessing elements:\n- At(size_t n): Returns the element at the given row number n.\n- operator[](size_t n): Returns the element at the given row number n using the array subscript operator.\nFor example:\n\nconst ValueType At(size_t n) const;\n\ninline const ValueType operator[](size_t n) const { return At(n); }\n", "golden_doc_uuids": ["130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6"], "golden_chunk_uuids": [["130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", 0]], "golden_documents": [{"uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate <typename NestedColumnType, Type::Code type_code>\nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template <typename T = ValueType>\n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr<NestedColumnType> data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo<ColumnTupleT<ColumnFloat64, ColumnFloat64>, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo<ColumnArrayT<ColumnPoint>, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo<ColumnArrayT<ColumnRing>, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo<ColumnArrayT<ColumnPolygon>, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/geo.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 0, "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate <typename NestedColumnType, Type::Code type_code>\nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template <typename T = ValueType>\n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\n", "meta": {"hash_id": "91854448f7b827201efd47f63485a7f9c7938f68f6579a9fc1fcf0db1e9a45f4"}}, {"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 1, "content": "public:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n", "meta": {"hash_id": "7136d0cc76ab9f15bf4c8d363bec9d3d0dd34ececfb4a9c184a8e44e7cc48706"}}, {"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 2, "content": " /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr<NestedColumnType> data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo<ColumnTupleT<ColumnFloat64, ColumnFloat64>, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo<ColumnArrayT<ColumnPoint>, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo<ColumnArrayT<ColumnRing>, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo<ColumnArrayT<ColumnPolygon>, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"hash_id": "15626b152ff8f2d8f7e1f7fd6d11719068a0b020b869e99d0286fbc7eb876575"}}]}], "golden_chunks": [{"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 0, "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate <typename NestedColumnType, Type::Code type_code>\nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template <typename T = ValueType>\n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\n", "meta": {"hash_id": "91854448f7b827201efd47f63485a7f9c7938f68f6579a9fc1fcf0db1e9a45f4"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/geo.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}}
{"query": "How can you append the content of one ColumnGeo to another?", "answer": "The ColumnGeo class provides an Append(ColumnRef column) method that appends the content of the given column to the end of the current column. It is an overridden method from the base Column class. For example:\n\nvoid Append(ColumnRef column) override;\n", "golden_doc_uuids": ["130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6"], "golden_chunk_uuids": [["130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", 1]], "golden_documents": [{"uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate <typename NestedColumnType, Type::Code type_code>\nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template <typename T = ValueType>\n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr<NestedColumnType> data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo<ColumnTupleT<ColumnFloat64, ColumnFloat64>, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo<ColumnArrayT<ColumnPoint>, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo<ColumnArrayT<ColumnRing>, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo<ColumnArrayT<ColumnPolygon>, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/geo.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 0, "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate <typename NestedColumnType, Type::Code type_code>\nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template <typename T = ValueType>\n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\n", "meta": {"hash_id": "91854448f7b827201efd47f63485a7f9c7938f68f6579a9fc1fcf0db1e9a45f4"}}, {"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 1, "content": "public:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n", "meta": {"hash_id": "7136d0cc76ab9f15bf4c8d363bec9d3d0dd34ececfb4a9c184a8e44e7cc48706"}}, {"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 2, "content": " /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr<NestedColumnType> data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo<ColumnTupleT<ColumnFloat64, ColumnFloat64>, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo<ColumnArrayT<ColumnPoint>, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo<ColumnArrayT<ColumnRing>, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo<ColumnArrayT<ColumnPolygon>, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"hash_id": "15626b152ff8f2d8f7e1f7fd6d11719068a0b020b869e99d0286fbc7eb876575"}}]}], "golden_chunks": [{"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 1, "content": "public:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n", "meta": {"hash_id": "7136d0cc76ab9f15bf4c8d363bec9d3d0dd34ececfb4a9c184a8e44e7cc48706"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/geo.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}}
{"query": "How can you clear the data of a ColumnGeo?", "answer": "The ColumnGeo class provides a Clear() method that clears the column data. It is an overridden method from the base Column class. For example:\n\nvoid Clear() override;\n", "golden_doc_uuids": ["130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6"], "golden_chunk_uuids": [["130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", 2]], "golden_documents": [{"uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate <typename NestedColumnType, Type::Code type_code>\nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template <typename T = ValueType>\n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\npublic:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr<NestedColumnType> data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo<ColumnTupleT<ColumnFloat64, ColumnFloat64>, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo<ColumnArrayT<ColumnPoint>, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo<ColumnArrayT<ColumnRing>, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo<ColumnArrayT<ColumnPolygon>, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/geo.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 0, "content": "#pragma once\n\n#include \"array.h\"\n#include \"column.h\"\n#include \"numeric.h\"\n#include \"tuple.h\"\n\nnamespace clickhouse {\n\ntemplate <typename NestedColumnType, Type::Code type_code>\nclass ColumnGeo : public Column {\npublic:\n using ValueType = typename NestedColumnType::ValueType;\n\n ColumnGeo();\n\n explicit ColumnGeo(ColumnRef data);\n\n /// Appends one element to the end of column.\n template <typename T = ValueType>\n void Append(const T& value) {\n data_->Append(value);\n }\n\n /// Returns element at given row number.\n const ValueType At(size_t n) const;\n\n /// Returns element at given row number.\n inline const ValueType operator[](size_t n) const { return At(n); }\n\n", "meta": {"hash_id": "91854448f7b827201efd47f63485a7f9c7938f68f6579a9fc1fcf0db1e9a45f4"}}, {"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 1, "content": "public:\n /// Increase the capacity of the column for large block insertion.\n void Reserve(size_t new_cap) override;\n\n /// Appends content of given column to the end of current one.\n void Append(ColumnRef column) override;\n\n /// Loads column data from input stream.\n bool LoadBody(InputStream* input, size_t rows) override;\n\n /// Saves column data to output stream.\n void SaveBody(OutputStream* output) override;\n\n", "meta": {"hash_id": "7136d0cc76ab9f15bf4c8d363bec9d3d0dd34ececfb4a9c184a8e44e7cc48706"}}, {"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 2, "content": " /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr<NestedColumnType> data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo<ColumnTupleT<ColumnFloat64, ColumnFloat64>, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo<ColumnArrayT<ColumnPoint>, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo<ColumnArrayT<ColumnRing>, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo<ColumnArrayT<ColumnPolygon>, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"hash_id": "15626b152ff8f2d8f7e1f7fd6d11719068a0b020b869e99d0286fbc7eb876575"}}]}], "golden_chunks": [{"doc_uuid": "130da0aba10d9e75238c693ee9b6ea2494f8eca2a10e29fa834efe3729ee5bd6", "index": 2, "content": " /// Clear column data .\n void Clear() override;\n\n /// Returns count of rows in the column.\n size_t Size() const override;\n\n /// Makes slice of the current column.\n ColumnRef Slice(size_t begin, size_t len) const override;\n ColumnRef CloneEmpty() const override;\n void Swap(Column& other) override;\n\nprivate:\n std::shared_ptr<NestedColumnType> data_;\n};\n\n// /**\n// * Represents a Point column.\n// */\nusing ColumnPoint = ColumnGeo<ColumnTupleT<ColumnFloat64, ColumnFloat64>, Type::Code::Point>;\n\n/**\n * Represents a Ring column.\n */\nusing ColumnRing = ColumnGeo<ColumnArrayT<ColumnPoint>, Type::Code::Ring>;\n\n/**\n * Represents a Polygon column.\n */\nusing ColumnPolygon = ColumnGeo<ColumnArrayT<ColumnRing>, Type::Code::Polygon>;\n\n/**\n * Represents a MultiPolygon column.\n */\nusing ColumnMultiPolygon = ColumnGeo<ColumnArrayT<ColumnPolygon>, Type::Code::MultiPolygon>;\n\n} // namespace clickhouse\n", "meta": {"hash_id": "15626b152ff8f2d8f7e1f7fd6d11719068a0b020b869e99d0286fbc7eb876575"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/columns/geo.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 3, "source": "codebases/jsonl"}}
{"query": "How do you construct a ProjectedIterator?", "answer": "To construct a ProjectedIterator, you need to provide an iterator and a unary function as arguments to the constructor. 
The constructor signature is:\ninline ProjectedIterator(Iterator const& iterator, UnaryFunction functor)\nIt takes the underlying iterator by const reference and the unary function by value, and initializes the corresponding member variables iterator_ and functor_.", "golden_doc_uuids": ["43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd"], "golden_chunk_uuids": [["43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd", 1]], "golden_documents": [{"uuid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd", "content": "#pragma once\n\n#include <iterator>\n#include <type_traits>\n#include <utility>\n\nnamespace clickhouse {\n\ntemplate <typename Iterator, typename UnaryFunction,\n typename Reference = decltype(std::declval<UnaryFunction>()(std::declval<Iterator>())),\n typename Value = std::decay_t<Reference>>\nclass ProjectedIterator {\npublic:\n using value_type = Value;\n using reference = Reference;\n using pointer = Reference;\n using difference_type = typename std::iterator_traits<Iterator>::difference_type;\n using iterator_category = typename std::iterator_traits<Iterator>::iterator_category;\n\n ProjectedIterator() = default;\n\n inline ProjectedIterator(Iterator const& iterator, UnaryFunction functor)\n : iterator_(iterator)\n , functor_(std::move(functor)) {\n }\n\n inline UnaryFunction functor() const { return functor; }\n\n inline Iterator const& base() const { return iterator_; }\n\n inline reference operator*() const { return functor_(iterator_); }\n\n inline ProjectedIterator& operator++() {\n ++iterator_;\n return *this;\n }\n\n inline ProjectedIterator& operator--() {\n --iterator_;\n return *this;\n }\n\n inline bool operator==(const ProjectedIterator& other) const {\n return this->iterator_ == other.iterator_;\n }\n\n inline bool operator!=(const ProjectedIterator& other) const {\n return !(*this == other);\n }\n\nprivate:\n Iterator iterator_;\n UnaryFunction functor_;\n};\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/base/projected_iterator.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd", "index": 0, "content": "#pragma once\n\n#include <iterator>\n#include <type_traits>\n#include <utility>\n\nnamespace clickhouse {\n\ntemplate <typename Iterator, typename UnaryFunction,\n typename Reference = decltype(std::declval<UnaryFunction>()(std::declval<Iterator>())),\n typename Value = std::decay_t<Reference>>\nclass ProjectedIterator {\npublic:\n using value_type = Value;\n using reference = Reference;\n using pointer = Reference;\n using difference_type = typename std::iterator_traits<Iterator>::difference_type;\n using iterator_category = typename std::iterator_traits<Iterator>::iterator_category;\n\n", "meta": {"hash_id": "eb863fb0a99f7952c980d3720f93e64b932c7555b41b0b39651eaafd5b52be4a"}}, {"doc_uuid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd", "index": 1, "content": " ProjectedIterator() = default;\n\n inline ProjectedIterator(Iterator const& iterator, UnaryFunction functor)\n : iterator_(iterator)\n , functor_(std::move(functor)) {\n }\n\n inline UnaryFunction functor() const { return functor; }\n\n inline Iterator const& base() const { return iterator_; }\n\n inline reference operator*() const { return functor_(iterator_); }\n\n inline ProjectedIterator& operator++() {\n ++iterator_;\n return *this;\n }\n\n inline ProjectedIterator& operator--() {\n --iterator_;\n return *this;\n }\n\n inline bool operator==(const ProjectedIterator& other) const {\n return this->iterator_ == other.iterator_;\n }\n\n inline bool operator!=(const ProjectedIterator& other) const {\n return !(*this == other);\n }\n\nprivate:\n Iterator iterator_;\n UnaryFunction functor_;\n};\n\n} // namespace clickhouse\n", "meta": {"hash_id": "3b9094e7210cd293ad6cab4030fa9635b543d03f6727eab0a42d3e039c633789"}}]}], "golden_chunks": [{"doc_uuid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd", "index": 1, "content": " ProjectedIterator() = default;\n\n inline ProjectedIterator(Iterator const& iterator, UnaryFunction functor)\n : iterator_(iterator)\n , functor_(std::move(functor)) {\n }\n\n inline UnaryFunction functor() const { return functor; }\n\n inline Iterator const& base() const { return iterator_; }\n\n inline reference operator*() const { return functor_(iterator_); }\n\n inline ProjectedIterator& operator++() {\n ++iterator_;\n return *this;\n }\n\n inline ProjectedIterator& operator--() {\n --iterator_;\n return *this;\n }\n\n inline bool operator==(const ProjectedIterator& other) const {\n return this->iterator_ == other.iterator_;\n }\n\n inline bool operator!=(const ProjectedIterator& other) const {\n return !(*this == other);\n }\n\nprivate:\n Iterator iterator_;\n UnaryFunction functor_;\n};\n\n} // namespace clickhouse\n", "meta": {"hash_id": "3b9094e7210cd293ad6cab4030fa9635b543d03f6727eab0a42d3e039c633789"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/base/projected_iterator.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 2, "source": "codebases/jsonl"}}
{"query": "How do you increment and decrement a ProjectedIterator?", "answer": "The ProjectedIterator class overloads the increment (++) and decrement (--) operators to move the underlying iterator forward or backward. The operator implementations are:\n\ninline ProjectedIterator& operator++() {\n ++iterator_;\n return *this;\n}\n\ninline ProjectedIterator& operator--() {\n --iterator_;\n return *this;\n}\n\nWhen the increment or decrement operator is called on a ProjectedIterator, it applies the corresponding operation to the underlying iterator (iterator_) and returns a reference to the updated ProjectedIterator.", "golden_doc_uuids": ["43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd"], "golden_chunk_uuids": [["43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd", 1]], "golden_documents": [{"uuid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd", "content": "#pragma once\n\n#include <iterator>\n#include <type_traits>\n#include <utility>\n\nnamespace clickhouse {\n\ntemplate <typename Iterator, typename UnaryFunction,\n typename Reference = decltype(std::declval<UnaryFunction>()(std::declval<Iterator>())),\n typename Value = std::decay_t<Reference>>\nclass ProjectedIterator {\npublic:\n using value_type = Value;\n using reference = Reference;\n using pointer = Reference;\n using difference_type = typename std::iterator_traits<Iterator>::difference_type;\n using iterator_category = typename std::iterator_traits<Iterator>::iterator_category;\n\n ProjectedIterator() = default;\n\n inline ProjectedIterator(Iterator const& iterator, UnaryFunction functor)\n : iterator_(iterator)\n , functor_(std::move(functor)) {\n }\n\n inline UnaryFunction functor() const { return functor; }\n\n inline Iterator const& base() const { return iterator_; }\n\n inline reference operator*() const { return functor_(iterator_); }\n\n inline ProjectedIterator& operator++() {\n ++iterator_;\n return *this;\n }\n\n inline ProjectedIterator& operator--() {\n --iterator_;\n return *this;\n }\n\n inline bool operator==(const ProjectedIterator& other) const {\n return this->iterator_ == other.iterator_;\n }\n\n inline bool operator!=(const ProjectedIterator& other) const {\n return !(*this == other);\n }\n\nprivate:\n Iterator iterator_;\n UnaryFunction functor_;\n};\n\n} // namespace clickhouse\n", "meta": {"doctype": "codebase", "relative_path": "/clickhouse/base/projected_iterator.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd", "index": 0, "content": "#pragma once\n\n#include <iterator>\n#include <type_traits>\n#include <utility>\n\nnamespace clickhouse {\n\ntemplate <typename Iterator, typename UnaryFunction,\n typename Reference = decltype(std::declval<UnaryFunction>()(std::declval<Iterator>())),\n typename Value = std::decay_t<Reference>>\nclass ProjectedIterator {\npublic:\n using value_type = Value;\n using reference = Reference;\n using pointer = Reference;\n using difference_type = typename std::iterator_traits<Iterator>::difference_type;\n using iterator_category = typename std::iterator_traits<Iterator>::iterator_category;\n\n", "meta": {"hash_id": "eb863fb0a99f7952c980d3720f93e64b932c7555b41b0b39651eaafd5b52be4a"}}, {"doc_uuid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd", "index": 1, "content": " ProjectedIterator() = default;\n\n inline ProjectedIterator(Iterator const& iterator, UnaryFunction functor)\n : iterator_(iterator)\n , functor_(std::move(functor)) {\n }\n\n inline UnaryFunction functor() const { return functor; }\n\n inline Iterator const& base() const { return iterator_; }\n\n inline reference operator*() const { return functor_(iterator_); }\n\n inline ProjectedIterator& operator++() {\n ++iterator_;\n return *this;\n }\n\n inline ProjectedIterator& operator--() {\n --iterator_;\n return *this;\n }\n\n inline bool operator==(const ProjectedIterator& other) const {\n return this->iterator_ == other.iterator_;\n }\n\n inline bool operator!=(const ProjectedIterator& other) const {\n return !(*this == other);\n }\n\nprivate:\n Iterator iterator_;\n UnaryFunction functor_;\n};\n\n} // namespace clickhouse\n", "meta": {"hash_id": "3b9094e7210cd293ad6cab4030fa9635b543d03f6727eab0a42d3e039c633789"}}]}], "golden_chunks": [{"doc_uuid": "43bd45ab839500606044476e58992df90d6c72471777260327776df9c3f3d4bd", "index": 1, "content": " ProjectedIterator() = default;\n\n inline ProjectedIterator(Iterator const& iterator, UnaryFunction functor)\n : iterator_(iterator)\n , functor_(std::move(functor)) {\n }\n\n inline UnaryFunction functor() const { return functor; }\n\n inline Iterator const& base() const { return iterator_; }\n\n inline reference operator*() const { return functor_(iterator_); }\n\n inline ProjectedIterator& operator++() {\n ++iterator_;\n return *this;\n }\n\n inline ProjectedIterator& operator--() {\n --iterator_;\n return *this;\n }\n\n inline bool operator==(const ProjectedIterator& other) const {\n return this->iterator_ == other.iterator_;\n }\n\n inline bool operator!=(const ProjectedIterator& other) const {\n return !(*this == other);\n }\n\nprivate:\n Iterator iterator_;\n UnaryFunction functor_;\n};\n\n} // namespace clickhouse\n", "meta": {"hash_id": "3b9094e7210cd293ad6cab4030fa9635b543d03f6727eab0a42d3e039c633789"}}], "meta": {"doctype": "codebase", "relative_path": "/clickhouse/base/projected_iterator.h", "repo_name": "ClickHouse/clickhouse-cpp", "num_chunks": 2, "source": "codebases/jsonl"}}
{"query": "What are the possible values for the ConsoleOutput enum?", "answer": "The ConsoleOutput enum has three possible values, as defined in the code:\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n\nThe values are auto, rich, and plain.", "golden_doc_uuids": ["4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8"], "golden_chunk_uuids": [["4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", 0]], "golden_documents": [{"uuid": 
"4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-cli/src/main/java/com/google/cloud/tools/jib/cli/logging/ConsoleOutput.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", "index": 0, "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n", "meta": {"hash_id": "0911b64bec8c14ceaa9c4059d59e4f5e049b998bcec8793d3b8ccb34f1b352b1"}}]}], "golden_chunks": [{"doc_uuid": "4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", "index": 0, "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n", "meta": {"hash_id": "0911b64bec8c14ceaa9c4059d59e4f5e049b998bcec8793d3b8ccb34f1b352b1"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-cli/src/main/java/com/google/cloud/tools/jib/cli/logging/ConsoleOutput.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What package is the ConsoleOutput enum defined in?", "answer": "The ConsoleOutput enum is defined in the package com.google.cloud.tools.jib.cli.logging, as indicated by this line:\npackage com.google.cloud.tools.jib.cli.logging;", "golden_doc_uuids": ["4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8"], "golden_chunk_uuids": [["4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", 0]], "golden_documents": [{"uuid": "4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-cli/src/main/java/com/google/cloud/tools/jib/cli/logging/ConsoleOutput.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", "index": 0, "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n", "meta": {"hash_id": "0911b64bec8c14ceaa9c4059d59e4f5e049b998bcec8793d3b8ccb34f1b352b1"}}]}], "golden_chunks": [{"doc_uuid": "4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", "index": 0, "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. 
You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n", "meta": {"hash_id": "0911b64bec8c14ceaa9c4059d59e4f5e049b998bcec8793d3b8ccb34f1b352b1"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-cli/src/main/java/com/google/cloud/tools/jib/cli/logging/ConsoleOutput.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What do the different values of the ConsoleOutput enum represent?", "answer": "Based on the names, the values likely represent different modes for outputting to the console:\nauto likely means the output format is automatically determined.\nrich likely means the output uses rich formatting like colors or special characters.\nplain likely means the output is plain text without any special formatting.\nHowever, the specific behavior for each value is not defined in this file. The code that uses this enum would need to be consulted to determine the exact behavior.", "golden_doc_uuids": ["4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8"], "golden_chunk_uuids": [["4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", 0]], "golden_documents": [{"uuid": "4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-cli/src/main/java/com/google/cloud/tools/jib/cli/logging/ConsoleOutput.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", "index": 0, "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n", "meta": {"hash_id": "0911b64bec8c14ceaa9c4059d59e4f5e049b998bcec8793d3b8ccb34f1b352b1"}}]}], "golden_chunks": [{"doc_uuid": "4969d098eff293d66a67d63d1c2d7c785a7b09666272510b9e0c6324e8246dc8", "index": 0, "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.cli.logging;\n\npublic enum ConsoleOutput {\n auto,\n rich,\n plain\n}\n", "meta": {"hash_id": "0911b64bec8c14ceaa9c4059d59e4f5e049b998bcec8793d3b8ccb34f1b352b1"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-cli/src/main/java/com/google/cloud/tools/jib/cli/logging/ConsoleOutput.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "How does the UpdateChecker store the timestamp of the last update check?", "answer": "The UpdateChecker stores the timestamp of the last update check in a file named \"lastUpdateCheck\" in the configuration directory. After a successful update check, it writes the current timestamp to this file. This can be seen in the following code:\n\n Path lastUpdateCheckTemp =\n Files.createTempFile(configDir, LAST_UPDATE_CHECK_FILENAME, null);\n lastUpdateCheckTemp.toFile().deleteOnExit();\n Files.write(lastUpdateCheckTemp, Instant.now().toString().getBytes(StandardCharsets.UTF_8));\n Files.move(lastUpdateCheckTemp, lastUpdateCheck, StandardCopyOption.REPLACE_EXISTING);\n", "golden_doc_uuids": ["e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66"], "golden_chunk_uuids": [["e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", 2]], "golden_documents": [{"uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\nimport com.fasterxml.jackson.annotation.JsonIgnoreProperties;\nimport com.google.cloud.tools.jib.api.LogEvent;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.http.Request;\nimport com.google.cloud.tools.jib.http.Response;\nimport com.google.cloud.tools.jib.json.JsonTemplate;\nimport com.google.cloud.tools.jib.json.JsonTemplateMapper;\nimport com.google.cloud.tools.jib.plugins.common.globalconfig.GlobalConfig;\nimport com.google.common.annotations.VisibleForTesting;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardCopyOption;\nimport java.time.Duration;\nimport java.time.Instant;\nimport java.time.format.DateTimeParseException;\nimport java.util.Optional;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Future;\nimport java.util.function.Consumer;\n\n/** Checks if Jib is up-to-date. */\npublic class UpdateChecker {\n\n private static final String LAST_UPDATE_CHECK_FILENAME = \"lastUpdateCheck\";\n\n /** JSON template for content downloaded during version check. */\n @JsonIgnoreProperties(ignoreUnknown = true)\n private static class VersionJsonTemplate implements JsonTemplate {\n private String latest = \"\";\n }\n\n /**\n * Begins checking for an update in a separate thread.\n *\n * @param executorService the {@link ExecutorService}\n * @param versionUrl the location to check for the latest version\n * @param toolName the tool name\n * @param toolVersion the tool version\n * @param log {@link Consumer} used to log messages\n * @return a new {@link UpdateChecker}\n */\n public static Future<Optional<String>> checkForUpdate(\n ExecutorService executorService,\n String versionUrl,\n String toolName,\n String toolVersion,\n Consumer<LogEvent> log) {\n return executorService.submit(\n () ->\n performUpdateCheck(\n GlobalConfig.getConfigDir(), toolVersion, versionUrl, toolName, log));\n }\n\n @VisibleForTesting\n static Optional<String> performUpdateCheck(\n Path configDir,\n String currentVersion,\n String versionUrl,\n String toolName,\n Consumer<LogEvent> log) {\n Path lastUpdateCheck = configDir.resolve(LAST_UPDATE_CHECK_FILENAME);\n\n try {\n // Check time of last update check\n if (Files.exists(lastUpdateCheck)) {\n try {\n String fileContents =\n new String(Files.readAllBytes(lastUpdateCheck), StandardCharsets.UTF_8);\n Instant modifiedTime = Instant.parse(fileContents);\n if (modifiedTime.plus(Duration.ofDays(1)).isAfter(Instant.now())) {\n return Optional.empty();\n }\n } catch (DateTimeParseException | IOException ex) {\n // If reading update time failed, file might be corrupt, so delete it\n log.accept(LogEvent.debug(\"Failed to read lastUpdateCheck; \" + ex.getMessage()));\n Files.delete(lastUpdateCheck);\n }\n }\n\n // Check for update\n FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n try {\n Response response =\n httpClient.get(\n new URL(versionUrl),\n Request.builder()\n .setHttpTimeout(3000)\n .setUserAgent(\"jib \" + currentVersion + \" \" + toolName)\n .build());\n VersionJsonTemplate version =\n JsonTemplateMapper.readJson(response.getBody(), VersionJsonTemplate.class);\n\n Path lastUpdateCheckTemp =\n Files.createTempFile(configDir, LAST_UPDATE_CHECK_FILENAME, null);\n lastUpdateCheckTemp.toFile().deleteOnExit();\n Files.write(lastUpdateCheckTemp, Instant.now().toString().getBytes(StandardCharsets.UTF_8));\n Files.move(lastUpdateCheckTemp, lastUpdateCheck, StandardCopyOption.REPLACE_EXISTING);\n\n if (currentVersion.equals(version.latest)) {\n return Optional.empty();\n }\n return Optional.of(version.latest);\n } finally {\n httpClient.shutDown();\n }\n\n } catch (IOException ex) {\n log.accept(LogEvent.debug(\"Update check failed; \" + ex.getMessage()));\n }\n\n return Optional.empty();\n }\n\n /**\n * Returns the latest Jib version available if the check succeeded and the current version is\n * outdated, or returns {@code Optional.empty()} if the check was interrupted or did not determine\n * that a later version was available.\n *\n * @param updateMessageFuture the {@link Future} returned by {@link UpdateChecker#checkForUpdate}\n * @return the latest version, if found, else {@code Optional.empty()}.\n */\n public static Optional<String> finishUpdateCheck(Future<Optional<String>> updateMessageFuture) {\n if (updateMessageFuture.isDone()) {\n try {\n return updateMessageFuture.get();\n } catch (InterruptedException | ExecutionException ex) {\n // No need to restore the interrupted status. The intention here is to silently consume any\n // kind of error\n }\n }\n updateMessageFuture.cancel(true);\n return Optional.empty();\n }\n\n private UpdateChecker() {}\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-plugins-common/src/main/java/com/google/cloud/tools/jib/plugins/common/UpdateChecker.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 0, "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\n", "meta": {"hash_id": "a42afaf9f5cbd2ff61ba5a24ac54c48d7d357c89144983abb7e7c64f9c23dd4b"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 1, "content": "import com.fasterxml.jackson.annotation.JsonIgnoreProperties;\nimport com.google.cloud.tools.jib.api.LogEvent;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.http.Request;\nimport com.google.cloud.tools.jib.http.Response;\nimport com.google.cloud.tools.jib.json.JsonTemplate;\nimport com.google.cloud.tools.jib.json.JsonTemplateMapper;\nimport com.google.cloud.tools.jib.plugins.common.globalconfig.GlobalConfig;\nimport com.google.common.annotations.VisibleForTesting;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardCopyOption;\nimport java.time.Duration;\nimport java.time.Instant;\nimport java.time.format.DateTimeParseException;\nimport java.util.Optional;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Future;\nimport java.util.function.Consumer;\n\n", "meta": {"hash_id": "f316b5b33a752be2f509c9279017b61ab71f4ae6862d598369573b2fcca9b408"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 2, "content": "/** Checks if Jib is up-to-date. */\npublic class UpdateChecker {\n\n private static final String LAST_UPDATE_CHECK_FILENAME = \"lastUpdateCheck\";\n\n /** JSON template for content downloaded during version check. 
*/\n @JsonIgnoreProperties(ignoreUnknown = true)\n private static class VersionJsonTemplate implements JsonTemplate {\n private String latest = \"\";\n }\n\n", "meta": {"hash_id": "adaf46a10c9e5f86ee717351bafda6e9d08a321dee6e18b12afb07bceb635462"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 3, "content": " /**\n * Begins checking for an update in a separate thread.\n *\n * @param executorService the {@link ExecutorService}\n * @param versionUrl the location to check for the latest version\n * @param toolName the tool name\n * @param toolVersion the tool version\n * @param log {@link Consumer} used to log messages\n * @return a new {@link UpdateChecker}\n */\n public static Future<Optional<String>> checkForUpdate(\n ExecutorService executorService,\n String versionUrl,\n String toolName,\n String toolVersion,\n Consumer<LogEvent> log) {\n return executorService.submit(\n () ->\n performUpdateCheck(\n GlobalConfig.getConfigDir(), toolVersion, versionUrl, toolName, log));\n }\n\n", "meta": {"hash_id": "3c57c61b60a92bf7a3bac93aa4d55b7bff5f1bac5c83db7dc42c20ef5e2076d4"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 4, "content": " @VisibleForTesting\n static Optional<String> performUpdateCheck(\n Path configDir,\n String currentVersion,\n String versionUrl,\n String toolName,\n Consumer<LogEvent> log) {\n Path lastUpdateCheck = configDir.resolve(LAST_UPDATE_CHECK_FILENAME);\n\n try {\n // Check time of last update check\n if (Files.exists(lastUpdateCheck)) {\n try {\n String fileContents =\n new String(Files.readAllBytes(lastUpdateCheck), StandardCharsets.UTF_8);\n Instant modifiedTime = Instant.parse(fileContents);\n if (modifiedTime.plus(Duration.ofDays(1)).isAfter(Instant.now())) {\n return Optional.empty();\n }\n } catch (DateTimeParseException | IOException ex) {\n // If reading update time failed, file might be corrupt, so delete it\n log.accept(LogEvent.debug(\"Failed to read lastUpdateCheck; \" + ex.getMessage()));\n Files.delete(lastUpdateCheck);\n }\n }\n\n", "meta": {"hash_id": "409c3122079736dc530dad3a2888774f506b96ce32215eda2a05a84c8592681d"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 5, "content": " // Check for update\n FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n try {\n Response response =\n httpClient.get(\n new URL(versionUrl),\n Request.builder()\n .setHttpTimeout(3000)\n .setUserAgent(\"jib \" + currentVersion + \" \" + toolName)\n .build());\n VersionJsonTemplate version =\n JsonTemplateMapper.readJson(response.getBody(), VersionJsonTemplate.class);\n\n", "meta": {"hash_id": "cbec07f2a6c1bddffbfc3072d8e2cbec93842e4589a80af03d8906c5cc256390"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 6, "content": " Path lastUpdateCheckTemp =\n Files.createTempFile(configDir, LAST_UPDATE_CHECK_FILENAME, null);\n lastUpdateCheckTemp.toFile().deleteOnExit();\n Files.write(lastUpdateCheckTemp, Instant.now().toString().getBytes(StandardCharsets.UTF_8));\n Files.move(lastUpdateCheckTemp, lastUpdateCheck, StandardCopyOption.REPLACE_EXISTING);\n\n if (currentVersion.equals(version.latest)) {\n return Optional.empty();\n }\n return Optional.of(version.latest);\n } finally {\n httpClient.shutDown();\n }\n\n } catch (IOException ex) {\n log.accept(LogEvent.debug(\"Update check failed; \" + ex.getMessage()));\n }\n\n return Optional.empty();\n }\n\n", "meta": {"hash_id": "05c168f41962e4811da9ee79f3d5a3d62f1a4d3acb264b73a906aa89c264a06f"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 7, "content": " /**\n * Returns the latest Jib version available if the check succeeded and the current version is\n * outdated, or returns {@code Optional.empty()} if the check was interrupted or did not determine\n * that a later version was available.\n *\n * @param updateMessageFuture the {@link Future} returned by {@link UpdateChecker#checkForUpdate}\n * @return the latest version, if found, else {@code Optional.empty()}.\n */\n public static Optional<String> finishUpdateCheck(Future<Optional<String>> updateMessageFuture) {\n if (updateMessageFuture.isDone()) {\n try {\n return updateMessageFuture.get();\n } catch (InterruptedException | ExecutionException ex) {\n // No need to restore the interrupted status. The intention here is to silently consume any\n // kind of error\n }\n }\n updateMessageFuture.cancel(true);\n return Optional.empty();\n }\n\n private UpdateChecker() {}\n}\n", "meta": {"hash_id": "1208418459c7330c43a2277a78aa1ce39de6ec09789eeff9fa7d8932415665cb"}}]}], "golden_chunks": [{"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 2, "content": "/** Checks if Jib is up-to-date. */\npublic class UpdateChecker {\n\n private static final String LAST_UPDATE_CHECK_FILENAME = \"lastUpdateCheck\";\n\n /** JSON template for content downloaded during version check. */\n @JsonIgnoreProperties(ignoreUnknown = true)\n private static class VersionJsonTemplate implements JsonTemplate {\n private String latest = \"\";\n }\n\n", "meta": {"hash_id": "adaf46a10c9e5f86ee717351bafda6e9d08a321dee6e18b12afb07bceb635462"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-plugins-common/src/main/java/com/google/cloud/tools/jib/plugins/common/UpdateChecker.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}}
{"query": "What does the UpdateChecker return if the current version is up to date?", "answer": "If the UpdateChecker determines that the current version is equal to the latest version retrieved from the server, it returns Optional.empty(). This can be seen in the following code:\n\n if (currentVersion.equals(version.latest)) {\n return Optional.empty();\n }\n", "golden_doc_uuids": ["e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66"], "golden_chunk_uuids": [["e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", 3], ["e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", 2], ["e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", 1], ["e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", 0]], "golden_documents": [{"uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\nimport com.fasterxml.jackson.annotation.JsonIgnoreProperties;\nimport com.google.cloud.tools.jib.api.LogEvent;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.http.Request;\nimport com.google.cloud.tools.jib.http.Response;\nimport com.google.cloud.tools.jib.json.JsonTemplate;\nimport com.google.cloud.tools.jib.json.JsonTemplateMapper;\nimport com.google.cloud.tools.jib.plugins.common.globalconfig.GlobalConfig;\nimport com.google.common.annotations.VisibleForTesting;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardCopyOption;\nimport java.time.Duration;\nimport java.time.Instant;\nimport java.time.format.DateTimeParseException;\nimport java.util.Optional;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Future;\nimport java.util.function.Consumer;\n\n/** Checks if Jib is up-to-date. */\npublic class UpdateChecker {\n\n private static final String LAST_UPDATE_CHECK_FILENAME = \"lastUpdateCheck\";\n\n /** JSON template for content downloaded during version check. */\n @JsonIgnoreProperties(ignoreUnknown = true)\n private static class VersionJsonTemplate implements JsonTemplate {\n private String latest = \"\";\n }\n\n /**\n * Begins checking for an update in a separate thread.\n *\n * @param executorService the {@link ExecutorService}\n * @param versionUrl the location to check for the latest version\n * @param toolName the tool name\n * @param toolVersion the tool version\n * @param log {@link Consumer} used to log messages\n * @return a new {@link UpdateChecker}\n */\n public static Future<Optional<String>> checkForUpdate(\n ExecutorService executorService,\n String versionUrl,\n String toolName,\n String toolVersion,\n Consumer<LogEvent> log) {\n return executorService.submit(\n () ->\n performUpdateCheck(\n GlobalConfig.getConfigDir(), toolVersion, versionUrl, toolName, log));\n }\n\n @VisibleForTesting\n static Optional<String> performUpdateCheck(\n Path configDir,\n String currentVersion,\n String versionUrl,\n String toolName,\n Consumer<LogEvent> log) {\n Path lastUpdateCheck = configDir.resolve(LAST_UPDATE_CHECK_FILENAME);\n\n try {\n // Check time of last update check\n if (Files.exists(lastUpdateCheck)) {\n try {\n String fileContents =\n new String(Files.readAllBytes(lastUpdateCheck), StandardCharsets.UTF_8);\n Instant modifiedTime = Instant.parse(fileContents);\n if (modifiedTime.plus(Duration.ofDays(1)).isAfter(Instant.now())) {\n return Optional.empty();\n }\n } catch (DateTimeParseException | IOException ex) {\n // If reading update time failed, file might be corrupt, so delete it\n log.accept(LogEvent.debug(\"Failed to read lastUpdateCheck; \" + ex.getMessage()));\n Files.delete(lastUpdateCheck);\n }\n }\n\n // Check for update\n FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n try {\n Response response =\n httpClient.get(\n new URL(versionUrl),\n Request.builder()\n .setHttpTimeout(3000)\n .setUserAgent(\"jib \" + currentVersion + \" \" + toolName)\n .build());\n VersionJsonTemplate version =\n JsonTemplateMapper.readJson(response.getBody(), VersionJsonTemplate.class);\n\n Path lastUpdateCheckTemp =\n Files.createTempFile(configDir, LAST_UPDATE_CHECK_FILENAME, null);\n lastUpdateCheckTemp.toFile().deleteOnExit();\n Files.write(lastUpdateCheckTemp, Instant.now().toString().getBytes(StandardCharsets.UTF_8));\n Files.move(lastUpdateCheckTemp, lastUpdateCheck, StandardCopyOption.REPLACE_EXISTING);\n\n if (currentVersion.equals(version.latest)) {\n return Optional.empty();\n }\n return Optional.of(version.latest);\n } finally {\n httpClient.shutDown();\n }\n\n } catch (IOException ex) {\n log.accept(LogEvent.debug(\"Update check failed; \" + ex.getMessage()));\n }\n\n return Optional.empty();\n }\n\n /**\n * Returns the latest Jib version available if the check succeeded and the current version is\n * outdated, or returns {@code Optional.empty()} if the check was interrupted or did not determine\n * that a later version was available.\n *\n * @param updateMessageFuture the {@link Future} returned by {@link UpdateChecker#checkForUpdate}\n * @return the latest version, if found, else {@code Optional.empty()}.\n */\n public static Optional<String> finishUpdateCheck(Future<Optional<String>> updateMessageFuture) {\n if (updateMessageFuture.isDone()) {\n try {\n return updateMessageFuture.get();\n } catch (InterruptedException | ExecutionException ex) {\n // No need to restore the interrupted status. The intention here is to silently consume any\n // kind of error\n }\n }\n updateMessageFuture.cancel(true);\n return Optional.empty();\n }\n\n private UpdateChecker() {}\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-plugins-common/src/main/java/com/google/cloud/tools/jib/plugins/common/UpdateChecker.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 0, "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\n", "meta": {"hash_id": "a42afaf9f5cbd2ff61ba5a24ac54c48d7d357c89144983abb7e7c64f9c23dd4b"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 1, "content": "import com.fasterxml.jackson.annotation.JsonIgnoreProperties;\nimport com.google.cloud.tools.jib.api.LogEvent;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.http.Request;\nimport com.google.cloud.tools.jib.http.Response;\nimport com.google.cloud.tools.jib.json.JsonTemplate;\nimport com.google.cloud.tools.jib.json.JsonTemplateMapper;\nimport com.google.cloud.tools.jib.plugins.common.globalconfig.GlobalConfig;\nimport com.google.common.annotations.VisibleForTesting;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardCopyOption;\nimport java.time.Duration;\nimport java.time.Instant;\nimport java.time.format.DateTimeParseException;\nimport java.util.Optional;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Future;\nimport java.util.function.Consumer;\n\n", "meta": {"hash_id": "f316b5b33a752be2f509c9279017b61ab71f4ae6862d598369573b2fcca9b408"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 2, "content": "/** Checks if Jib is up-to-date. */\npublic class UpdateChecker {\n\n private static final String LAST_UPDATE_CHECK_FILENAME = \"lastUpdateCheck\";\n\n /** JSON template for content downloaded during version check. 
*/\n @JsonIgnoreProperties(ignoreUnknown = true)\n private static class VersionJsonTemplate implements JsonTemplate {\n private String latest = \"\";\n }\n\n", "meta": {"hash_id": "adaf46a10c9e5f86ee717351bafda6e9d08a321dee6e18b12afb07bceb635462"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 3, "content": " /**\n * Begins checking for an update in a separate thread.\n *\n * @param executorService the {@link ExecutorService}\n * @param versionUrl the location to check for the latest version\n * @param toolName the tool name\n * @param toolVersion the tool version\n * @param log {@link Consumer} used to log messages\n * @return a new {@link UpdateChecker}\n */\n public static Future<Optional<String>> checkForUpdate(\n ExecutorService executorService,\n String versionUrl,\n String toolName,\n String toolVersion,\n Consumer<LogEvent> log) {\n return executorService.submit(\n () ->\n performUpdateCheck(\n GlobalConfig.getConfigDir(), toolVersion, versionUrl, toolName, log));\n }\n\n", "meta": {"hash_id": "3c57c61b60a92bf7a3bac93aa4d55b7bff5f1bac5c83db7dc42c20ef5e2076d4"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 4, "content": " @VisibleForTesting\n static Optional<String> performUpdateCheck(\n Path configDir,\n String currentVersion,\n String versionUrl,\n String toolName,\n Consumer<LogEvent> log) {\n Path lastUpdateCheck = configDir.resolve(LAST_UPDATE_CHECK_FILENAME);\n\n try {\n // Check time of last update check\n if (Files.exists(lastUpdateCheck)) {\n try {\n String fileContents =\n new String(Files.readAllBytes(lastUpdateCheck), StandardCharsets.UTF_8);\n Instant modifiedTime = Instant.parse(fileContents);\n if (modifiedTime.plus(Duration.ofDays(1)).isAfter(Instant.now())) {\n return Optional.empty();\n }\n } catch (DateTimeParseException | IOException ex) {\n // If reading update time failed, file might be corrupt, so delete it\n log.accept(LogEvent.debug(\"Failed to read lastUpdateCheck; \" + ex.getMessage()));\n Files.delete(lastUpdateCheck);\n }\n }\n\n", "meta": {"hash_id": "409c3122079736dc530dad3a2888774f506b96ce32215eda2a05a84c8592681d"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 5, "content": " // Check for update\n FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n try {\n Response response =\n httpClient.get(\n new URL(versionUrl),\n Request.builder()\n .setHttpTimeout(3000)\n .setUserAgent(\"jib \" + currentVersion + \" \" + toolName)\n .build());\n VersionJsonTemplate version =\n JsonTemplateMapper.readJson(response.getBody(), VersionJsonTemplate.class);\n\n", "meta": {"hash_id": "cbec07f2a6c1bddffbfc3072d8e2cbec93842e4589a80af03d8906c5cc256390"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 6, "content": " Path lastUpdateCheckTemp =\n Files.createTempFile(configDir, LAST_UPDATE_CHECK_FILENAME, null);\n lastUpdateCheckTemp.toFile().deleteOnExit();\n Files.write(lastUpdateCheckTemp, Instant.now().toString().getBytes(StandardCharsets.UTF_8));\n Files.move(lastUpdateCheckTemp, lastUpdateCheck, StandardCopyOption.REPLACE_EXISTING);\n\n if (currentVersion.equals(version.latest)) {\n return Optional.empty();\n }\n return Optional.of(version.latest);\n } finally {\n httpClient.shutDown();\n }\n\n } catch (IOException ex) {\n log.accept(LogEvent.debug(\"Update check failed; \" + ex.getMessage()));\n }\n\n return Optional.empty();\n }\n\n", "meta": {"hash_id": 
"05c168f41962e4811da9ee79f3d5a3d62f1a4d3acb264b73a906aa89c264a06f"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 7, "content": " /**\n * Returns the latest Jib version available if the check succeeded and the current version is\n * outdated, or returns {@code Optional.empty()} if the check was interrupted or did not determine\n * that a later version was available.\n *\n * @param updateMessageFuture the {@link Future} returned by {@link UpdateChecker#checkForUpdate}\n * @return the latest version, if found, else {@code Optional.empty()}.\n */\n public static Optional<String> finishUpdateCheck(Future<Optional<String>> updateMessageFuture) {\n if (updateMessageFuture.isDone()) {\n try {\n return updateMessageFuture.get();\n } catch (InterruptedException | ExecutionException ex) {\n // No need to restore the interrupted status. The intention here is to silently consume any\n // kind of error\n }\n }\n updateMessageFuture.cancel(true);\n return Optional.empty();\n }\n\n private UpdateChecker() {}\n}\n", "meta": {"hash_id": "1208418459c7330c43a2277a78aa1ce39de6ec09789eeff9fa7d8932415665cb"}}]}], "golden_chunks": [{"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 3, "content": " /**\n * Begins checking for an update in a separate thread.\n *\n * @param executorService the {@link ExecutorService}\n * @param versionUrl the location to check for the latest version\n * @param toolName the tool name\n * @param toolVersion the tool version\n * @param log {@link Consumer} used to log messages\n * @return a new {@link UpdateChecker}\n */\n public static Future<Optional<String>> checkForUpdate(\n ExecutorService executorService,\n String versionUrl,\n String toolName,\n String toolVersion,\n Consumer<LogEvent> log) {\n return executorService.submit(\n () ->\n performUpdateCheck(\n GlobalConfig.getConfigDir(), toolVersion, versionUrl, toolName, log));\n }\n\n", "meta": {"hash_id": "3c57c61b60a92bf7a3bac93aa4d55b7bff5f1bac5c83db7dc42c20ef5e2076d4"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 2, "content": "/** Checks if Jib is up-to-date. */\npublic class UpdateChecker {\n\n private static final String LAST_UPDATE_CHECK_FILENAME = \"lastUpdateCheck\";\n\n /** JSON template for content downloaded during version check. 
*/\n @JsonIgnoreProperties(ignoreUnknown = true)\n private static class VersionJsonTemplate implements JsonTemplate {\n private String latest = \"\";\n }\n\n", "meta": {"hash_id": "adaf46a10c9e5f86ee717351bafda6e9d08a321dee6e18b12afb07bceb635462"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 1, "content": "import com.fasterxml.jackson.annotation.JsonIgnoreProperties;\nimport com.google.cloud.tools.jib.api.LogEvent;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.http.Request;\nimport com.google.cloud.tools.jib.http.Response;\nimport com.google.cloud.tools.jib.json.JsonTemplate;\nimport com.google.cloud.tools.jib.json.JsonTemplateMapper;\nimport com.google.cloud.tools.jib.plugins.common.globalconfig.GlobalConfig;\nimport com.google.common.annotations.VisibleForTesting;\nimport java.io.IOException;\nimport java.net.URL;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardCopyOption;\nimport java.time.Duration;\nimport java.time.Instant;\nimport java.time.format.DateTimeParseException;\nimport java.util.Optional;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Future;\nimport java.util.function.Consumer;\n\n", "meta": {"hash_id": "f316b5b33a752be2f509c9279017b61ab71f4ae6862d598369573b2fcca9b408"}}, {"doc_uuid": "e154c2b27db3036da0a3ae9e88f7445ec748ea9d2d1c24b14460801801705c66", "index": 0, "content": "/*\n * Copyright 2020 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\n", "meta": {"hash_id": "a42afaf9f5cbd2ff61ba5a24ac54c48d7d357c89144983abb7e7c64f9c23dd4b"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-plugins-common/src/main/java/com/google/cloud/tools/jib/plugins/common/UpdateChecker.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}} +{"query": "How does the DefaultCredentialRetrievers class handle credential helpers on Windows?", "answer": "On Windows, the DefaultCredentialRetrievers class will look for credential helper executables with a \".cmd\" or \".exe\" extension. 
This can be seen in the `testCredentialHelper_cmdExtension()` and `testCredentialHelper_exeExtension()` tests:\n```java\nproperties.setProperty(\"os.name\", \"winDOWs\");\nList<CredentialRetriever> retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment) \n .setCredentialHelper(pathWithoutCmd.toString())\n .asList();\n```\nWhen \"os.name\" is set to \"winDOWs\", it will look for the credential helper executable with a \".cmd\" or \".exe\" extension appended to the provided path.", "golden_doc_uuids": ["f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913"], "golden_chunk_uuids": [["f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", 23], ["f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", 16]], "golden_documents": [{"uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\nimport static com.google.common.truth.Truth.assertThat;\nimport static com.google.common.truth.Truth8.assertThat;\nimport static org.junit.Assert.assertThrows;\nimport static org.mockito.ArgumentMatchers.anyString;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport com.google.cloud.tools.jib.api.Credential;\nimport com.google.cloud.tools.jib.api.CredentialRetriever;\nimport com.google.cloud.tools.jib.frontend.CredentialRetrieverFactory;\nimport com.google.common.collect.ImmutableMap;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n/** Tests for {@link DefaultCredentialRetrievers}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class DefaultCredentialRetrieversTest {\n\n @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Mock private CredentialRetrieverFactory mockCredentialRetrieverFactory;\n @Mock private CredentialRetriever mockDockerCredentialHelperCredentialRetriever;\n @Mock private CredentialRetriever mockKnownCredentialRetriever;\n @Mock private CredentialRetriever mockInferredCredentialRetriever;\n @Mock private CredentialRetriever mockWellKnownCredentialHelpersCredentialRetriever;\n @Mock private CredentialRetriever mockXdgPrimaryCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockApplicationDefaultCredentialRetriever;\n\n private Properties properties;\n private Map environment;\n\n private final Credential knownCredential = Credential.from(\"username\", \"password\");\n private final Credential inferredCredential = Credential.from(\"username2\", \"password2\");\n\n @Before\n public void setUp() {\n properties = new Properties();\n properties.setProperty(\"os.name\", \"unknown\");\n properties.setProperty(\"user.home\", Paths.get(\"/system/home\").toString());\n environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/docker_config\").toString(),\n \"XDG_RUNTIME_DIR\",\n Paths.get(\"/run/user/1000\").toString(),\n \"XDG_CONFIG_HOME\",\n Paths.get(\"/env/home/.config\").toString());\n\n when(mockCredentialRetrieverFactory.dockerCredentialHelper(anyString()))\n .thenReturn(mockDockerCredentialHelperCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(knownCredential, \"credentialSource\"))\n .thenReturn(mockKnownCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(inferredCredential, \"inferredCredentialSource\"))\n .thenReturn(mockInferredCredentialRetriever);\n when(mockCredentialRetrieverFactory.wellKnownCredentialHelpers())\n .thenReturn(mockWellKnownCredentialHelpersCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/run/user/1000/containers/auth.json\")))\n .thenReturn(mockXdgPrimaryCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.config/containers/auth.json\")))\n .thenReturn(mockEnvHomeXdgCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.config/containers/auth.json\")))\n .thenReturn(mockSystemHomeXdgCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/config.json\")))\n 
.thenReturn(mockDockerConfigEnvDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/.dockerconfigjson\")))\n .thenReturn(mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(Paths.get(\"/docker_config/.dockercfg\")))\n .thenReturn(mockDockerConfigEnvLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/system/home/.docker/config.json\")))\n .thenReturn(mockSystemHomeDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockSystemHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/system/home/.docker/.dockercfg\")))\n .thenReturn(mockSystemHomeLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/env/home/.docker/config.json\")))\n .thenReturn(mockEnvHomeDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockEnvHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/env/home/.docker/.dockercfg\")))\n .thenReturn(mockEnvHomeLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.googleApplicationDefaultCredentials())\n .thenReturn(mockApplicationDefaultCredentialRetriever);\n }\n\n @Test\n public void testAsList() throws FileNotFoundException {\n List retriever =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retriever)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testAsList_all() throws FileNotFoundException {\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setKnownCredential(knownCredential, \"credentialSource\")\n .setInferredCredential(inferredCredential, \"inferredCredentialSource\")\n .setCredentialHelper(\"credentialHelperSuffix\")\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockKnownCredentialRetriever,\n mockDockerCredentialHelperCredentialRetriever,\n mockInferredCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n 
mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n verify(mockCredentialRetrieverFactory).known(knownCredential, \"credentialSource\");\n verify(mockCredentialRetrieverFactory).known(inferredCredential, \"inferredCredentialSource\");\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(\"docker-credential-credentialHelperSuffix\");\n }\n\n @Test\n public void testAsList_credentialHelperPath() throws IOException {\n Path fakeCredentialHelperPath = temporaryFolder.newFile(\"fake-credHelper\").toPath();\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(fakeCredentialHelperPath.toString());\n\n List retrievers = credentialRetrievers.asList();\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(fakeCredentialHelperPath.toString());\n\n Files.delete(fakeCredentialHelperPath);\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex)\n .hasMessageThat()\n .isEqualTo(\"Specified credential helper was not found: \" + fakeCredentialHelperPath);\n }\n\n @Test\n public void testDockerConfigRetrievers_undefinedHome() throws FileNotFoundException {\n List retrievers =\n new DefaultCredentialRetrievers(\n mockCredentialRetrieverFactory, new Properties(), new HashMap<>())\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testDockerConfigRetrievers_noDuplicateRetrievers() throws FileNotFoundException {\n properties.setProperty(\"user.home\", Paths.get(\"/env/home\").toString());\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n 
\"DOCKER_CONFIG\",\n Paths.get(\"/env/home/.docker\").toString());\n retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockEnvHomeXdgCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testCredentialHelper_cmdExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.cmd\").toPath();\n Path pathWithoutCmd = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutCmd.getParent().resolve(\"foo.cmd\"));\n\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString())\n .asList();\n\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testCredentialHelper_exeExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.exe\").toPath();\n Path pathWithoutExe = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutExe.getParent().resolve(\"foo.exe\"));\n\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString())\n .asList();\n\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n 
mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-plugins-common/src/test/java/com/google/cloud/tools/jib/plugins/common/DefaultCredentialRetrieversTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 29, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "2005dc9dac601488b647062a1255381fc195d7d57d416089a04cb3e6190e37c5"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 1, "content": "package com.google.cloud.tools.jib.plugins.common;\n\nimport static com.google.common.truth.Truth.assertThat;\nimport static com.google.common.truth.Truth8.assertThat;\nimport static org.junit.Assert.assertThrows;\nimport static org.mockito.ArgumentMatchers.anyString;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\n", "meta": {"hash_id": "bb6c3b193144ee26ad7e6e2af856db9b3b9b465d0f875a909acc929b7bd703c2"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 2, "content": "import com.google.cloud.tools.jib.api.Credential;\nimport com.google.cloud.tools.jib.api.CredentialRetriever;\nimport com.google.cloud.tools.jib.frontend.CredentialRetrieverFactory;\nimport com.google.common.collect.ImmutableMap;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n/** Tests for {@link DefaultCredentialRetrievers}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class DefaultCredentialRetrieversTest {\n\n @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n", "meta": {"hash_id": "0d9d045411df5c12b00b7a96fc704cba55d27bce8f2d8f7af0ca1a2f34dec93b"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 3, "content": " @Mock private CredentialRetrieverFactory mockCredentialRetrieverFactory;\n @Mock private CredentialRetriever mockDockerCredentialHelperCredentialRetriever;\n @Mock private CredentialRetriever mockKnownCredentialRetriever;\n @Mock private CredentialRetriever mockInferredCredentialRetriever;\n @Mock private CredentialRetriever mockWellKnownCredentialHelpersCredentialRetriever;\n @Mock private CredentialRetriever mockXdgPrimaryCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvLegacyDockerConfigCredentialRetriever;\n", "meta": {"hash_id": "b9ea067c1474786132dd8d58826d215fb051fee8aa181ad51997a3199427ac64"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 4, "content": " @Mock private CredentialRetriever mockSystemHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockApplicationDefaultCredentialRetriever;\n\n", "meta": {"hash_id": "f12335ea57aa85f16e42ac2906afab0af075cd0e7ac85ed2e9b0554cdcfd390a"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 5, "content": " private Properties properties;\n private Map environment;\n\n private final Credential knownCredential = Credential.from(\"username\", \"password\");\n private final Credential inferredCredential = Credential.from(\"username2\", \"password2\");\n\n @Before\n public void setUp() {\n properties = new Properties();\n properties.setProperty(\"os.name\", \"unknown\");\n properties.setProperty(\"user.home\", Paths.get(\"/system/home\").toString());\n environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/docker_config\").toString(),\n \"XDG_RUNTIME_DIR\",\n Paths.get(\"/run/user/1000\").toString(),\n \"XDG_CONFIG_HOME\",\n Paths.get(\"/env/home/.config\").toString());\n\n", "meta": {"hash_id": "965492a5f2fd985bc0ad83356fdddce8ff0d1849274e63ea72abbe5f24bc1b14"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 6, "content": " when(mockCredentialRetrieverFactory.dockerCredentialHelper(anyString()))\n .thenReturn(mockDockerCredentialHelperCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(knownCredential, \"credentialSource\"))\n .thenReturn(mockKnownCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(inferredCredential, \"inferredCredentialSource\"))\n 
.thenReturn(mockInferredCredentialRetriever);\n when(mockCredentialRetrieverFactory.wellKnownCredentialHelpers())\n .thenReturn(mockWellKnownCredentialHelpersCredentialRetriever);\n\n", "meta": {"hash_id": "4c95cb0f3fd04bdcb6958f4a3a066d4b4e1d2030011626d92ca9e8a115c08306"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 7, "content": " when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/run/user/1000/containers/auth.json\")))\n .thenReturn(mockXdgPrimaryCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.config/containers/auth.json\")))\n .thenReturn(mockEnvHomeXdgCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.config/containers/auth.json\")))\n .thenReturn(mockSystemHomeXdgCredentialRetriever);\n\n", "meta": {"hash_id": "ea39a9cdc3386887c1d83a1f9534b3f3ae638d4e3e3c126d398a51911f544fee"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 8, "content": " when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/config.json\")))\n .thenReturn(mockDockerConfigEnvDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/.dockerconfigjson\")))\n .thenReturn(mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(Paths.get(\"/docker_config/.dockercfg\")))\n .thenReturn(mockDockerConfigEnvLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/system/home/.docker/config.json\")))\n .thenReturn(mockSystemHomeDockerConfigCredentialRetriever);\n", "meta": {"hash_id": "d6132db7cd3c0b6e5aba062bd4a977702ab73ca213fdca18cf69c8fa9d908f96"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 9, "content": " when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockSystemHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/system/home/.docker/.dockercfg\")))\n .thenReturn(mockSystemHomeLegacyDockerConfigCredentialRetriever);\n", "meta": {"hash_id": "19651a57b947f914ced45377d46eeed9a1e4ad9b13a0779180ccb684748cdf59"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 10, "content": " when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/env/home/.docker/config.json\")))\n .thenReturn(mockEnvHomeDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockEnvHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/env/home/.docker/.dockercfg\")))\n .thenReturn(mockEnvHomeLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.googleApplicationDefaultCredentials())\n .thenReturn(mockApplicationDefaultCredentialRetriever);\n }\n\n", "meta": {"hash_id": "bb0b7bda9e5d7c89531635d2af732075fedbc0b140e5860d25a72fbc911dff61"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 11, "content": " @Test\n public void testAsList() throws FileNotFoundException {\n List retriever =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n 
assertThat(retriever)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n", "meta": {"hash_id": "12a9e35d06cd91fbc89418844451a6e210c81e70f0a257758bcd53ffe6a92d74"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 12, "content": " mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "meta": {"hash_id": "315fa52f304b785f319d7217db56cbdb420ca8b13b6e4c995814fa3f1d2e8f69"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 13, "content": " @Test\n public void testAsList_all() throws FileNotFoundException {\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setKnownCredential(knownCredential, \"credentialSource\")\n .setInferredCredential(inferredCredential, \"inferredCredentialSource\")\n .setCredentialHelper(\"credentialHelperSuffix\")\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockKnownCredentialRetriever,\n mockDockerCredentialHelperCredentialRetriever,\n mockInferredCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n", "meta": {"hash_id": "50125214400c0ea1df7ab9764fa6c35288de87a85e32c2e515e1b13839e12cab"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 14, "content": " mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n", "meta": {"hash_id": "f46a5cae20e5306eaa61725dd03529c1f762ab2077bad5af1cd1702286853ff1"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 15, "content": " verify(mockCredentialRetrieverFactory).known(knownCredential, \"credentialSource\");\n verify(mockCredentialRetrieverFactory).known(inferredCredential, \"inferredCredentialSource\");\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(\"docker-credential-credentialHelperSuffix\");\n }\n\n", "meta": {"hash_id": "ba9faca0cbb89f4d838491f6c2013f1bcfb2e825e3ceebde43f0bc1a45f41bc3"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 16, "content": " @Test\n public void testAsList_credentialHelperPath() throws IOException {\n Path fakeCredentialHelperPath = temporaryFolder.newFile(\"fake-credHelper\").toPath();\n DefaultCredentialRetrievers credentialRetrievers =\n 
new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(fakeCredentialHelperPath.toString());\n\n", "meta": {"hash_id": "5fee3f00c557c6ba06f33ad916f53823799d9aa419bc0027366e670f5f804a6a"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 17, "content": " List retrievers = credentialRetrievers.asList();\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n", "meta": {"hash_id": "35ac8c2dbc37c612f750a22a86c8257039063d524e7773315c8056adeaf59a34"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 18, "content": " mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(fakeCredentialHelperPath.toString());\n\n", "meta": {"hash_id": "5c6d342cf45b768601ae025cf45a78b70666d6dcefefa1c17fa4a2ade1305ba0"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 19, "content": " Files.delete(fakeCredentialHelperPath);\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex)\n .hasMessageThat()\n .isEqualTo(\"Specified credential helper was not found: \" + fakeCredentialHelperPath);\n }\n\n @Test\n public void testDockerConfigRetrievers_undefinedHome() throws FileNotFoundException {\n List retrievers =\n new DefaultCredentialRetrievers(\n mockCredentialRetrieverFactory, new Properties(), new HashMap<>())\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "meta": {"hash_id": "03aa0e2e77899856d8f0767dad3ef33a22ecaf35e50b88cc83e16307011556e1"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 20, "content": " @Test\n public void testDockerConfigRetrievers_noDuplicateRetrievers() throws FileNotFoundException {\n properties.setProperty(\"user.home\", Paths.get(\"/env/home\").toString());\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n", "meta": {"hash_id": "f1067ed553a8825d92d16e47aa7ef1a199387dfabb6623157d50c31af122e711"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 21, "content": " mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n 
mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n", "meta": {"hash_id": "1fd09b33b29b958bb191599bd93420f960326e2ed4d78093cf429f75ee45f8ee"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 22, "content": " environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/env/home/.docker\").toString());\n retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockEnvHomeXdgCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "meta": {"hash_id": "4a3ec59aec48d00bfa9257b6253641ce187d03d9452335640ddfd9a6481c0753"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 23, "content": " @Test\n public void testCredentialHelper_cmdExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.cmd\").toPath();\n Path pathWithoutCmd = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutCmd.getParent().resolve(\"foo.cmd\"));\n\n", "meta": {"hash_id": "c2012ec081c7a07f856b39475dd4c17f874a7b697d4b843e1c289d1fcf4a042a"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 24, "content": " DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString())\n .asList();\n\n", "meta": {"hash_id": "a1800a8d7fc791dc1b84353e6c9a97970de9e4ea43c45f8b278f9d74954ac485"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 25, "content": " assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "meta": {"hash_id": "ecdf3531c7967e2f8a9c25e32ffb5310ba0fe5403e1186661bb5fd7300ded319"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 26, "content": " @Test\n public void 
testCredentialHelper_exeExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.exe\").toPath();\n Path pathWithoutExe = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutExe.getParent().resolve(\"foo.exe\"));\n\n", "meta": {"hash_id": "adde1b937a648a858e3e7ccefa1f5124bfd6b02f750e062428a87831387ac3fb"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 27, "content": " DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString())\n .asList();\n\n", "meta": {"hash_id": "6df52f2ced9f42c4552ce9b342dd81a5ad408d8d32754ef68f1d69ee45998efe"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 28, "content": " assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n}\n", "meta": {"hash_id": "2c9a2cd683ab45b6ae2e27c536adb694c2c35006f2872624474f320db21b9e97"}}]}], "golden_chunks": [{"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 23, "content": " @Test\n public void testCredentialHelper_cmdExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.cmd\").toPath();\n Path pathWithoutCmd = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutCmd.getParent().resolve(\"foo.cmd\"));\n\n", "meta": {"hash_id": "c2012ec081c7a07f856b39475dd4c17f874a7b697d4b843e1c289d1fcf4a042a"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 16, "content": " @Test\n public void testAsList_credentialHelperPath() throws IOException {\n Path fakeCredentialHelperPath = temporaryFolder.newFile(\"fake-credHelper\").toPath();\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(fakeCredentialHelperPath.toString());\n\n", "meta": {"hash_id": "5fee3f00c557c6ba06f33ad916f53823799d9aa419bc0027366e670f5f804a6a"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-plugins-common/src/test/java/com/google/cloud/tools/jib/plugins/common/DefaultCredentialRetrieversTest.java", "repo_name": "GoogleContainerTools/jib", 
"num_chunks": 29, "source": "codebases/jsonl"}} +{"query": "How does the DefaultCredentialRetrievers class avoid duplicate CredentialRetriever instances?", "answer": "The `testDockerConfigRetrievers_noDuplicateRetrievers()` test shows that the class avoids creating duplicate CredentialRetriever instances when there is overlap between the \"user.home\" system property and the \"HOME\" environment variable:\n```java \nproperties.setProperty(\"user.home\", Paths.get(\"/env/home\").toString());\nList retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n```\nWith this setup, it only includes CredentialRetrievers for the \"/env/home\" paths, omitting the duplicates for \"/system/home\".", "golden_doc_uuids": ["f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913"], "golden_chunk_uuids": [["f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", 20], ["f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", 18], ["f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", 15], ["f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", 0]], "golden_documents": [{"uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\nimport static com.google.common.truth.Truth.assertThat;\nimport static com.google.common.truth.Truth8.assertThat;\nimport static org.junit.Assert.assertThrows;\nimport static org.mockito.ArgumentMatchers.anyString;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport com.google.cloud.tools.jib.api.Credential;\nimport com.google.cloud.tools.jib.api.CredentialRetriever;\nimport com.google.cloud.tools.jib.frontend.CredentialRetrieverFactory;\nimport com.google.common.collect.ImmutableMap;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n/** Tests for {@link DefaultCredentialRetrievers}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class DefaultCredentialRetrieversTest {\n\n @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Mock private CredentialRetrieverFactory mockCredentialRetrieverFactory;\n @Mock private CredentialRetriever mockDockerCredentialHelperCredentialRetriever;\n @Mock private CredentialRetriever mockKnownCredentialRetriever;\n @Mock private CredentialRetriever mockInferredCredentialRetriever;\n @Mock private CredentialRetriever mockWellKnownCredentialHelpersCredentialRetriever;\n @Mock private CredentialRetriever mockXdgPrimaryCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockApplicationDefaultCredentialRetriever;\n\n private Properties properties;\n private Map environment;\n\n private final Credential knownCredential = Credential.from(\"username\", \"password\");\n private final Credential inferredCredential = Credential.from(\"username2\", \"password2\");\n\n @Before\n public void setUp() {\n properties = new Properties();\n properties.setProperty(\"os.name\", \"unknown\");\n properties.setProperty(\"user.home\", Paths.get(\"/system/home\").toString());\n environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/docker_config\").toString(),\n \"XDG_RUNTIME_DIR\",\n Paths.get(\"/run/user/1000\").toString(),\n \"XDG_CONFIG_HOME\",\n Paths.get(\"/env/home/.config\").toString());\n\n when(mockCredentialRetrieverFactory.dockerCredentialHelper(anyString()))\n .thenReturn(mockDockerCredentialHelperCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(knownCredential, \"credentialSource\"))\n .thenReturn(mockKnownCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(inferredCredential, \"inferredCredentialSource\"))\n .thenReturn(mockInferredCredentialRetriever);\n when(mockCredentialRetrieverFactory.wellKnownCredentialHelpers())\n .thenReturn(mockWellKnownCredentialHelpersCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/run/user/1000/containers/auth.json\")))\n .thenReturn(mockXdgPrimaryCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.config/containers/auth.json\")))\n .thenReturn(mockEnvHomeXdgCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.config/containers/auth.json\")))\n .thenReturn(mockSystemHomeXdgCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/config.json\")))\n 
.thenReturn(mockDockerConfigEnvDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/.dockerconfigjson\")))\n .thenReturn(mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(Paths.get(\"/docker_config/.dockercfg\")))\n .thenReturn(mockDockerConfigEnvLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/system/home/.docker/config.json\")))\n .thenReturn(mockSystemHomeDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockSystemHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/system/home/.docker/.dockercfg\")))\n .thenReturn(mockSystemHomeLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/env/home/.docker/config.json\")))\n .thenReturn(mockEnvHomeDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockEnvHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/env/home/.docker/.dockercfg\")))\n .thenReturn(mockEnvHomeLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.googleApplicationDefaultCredentials())\n .thenReturn(mockApplicationDefaultCredentialRetriever);\n }\n\n @Test\n public void testAsList() throws FileNotFoundException {\n List retriever =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retriever)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testAsList_all() throws FileNotFoundException {\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setKnownCredential(knownCredential, \"credentialSource\")\n .setInferredCredential(inferredCredential, \"inferredCredentialSource\")\n .setCredentialHelper(\"credentialHelperSuffix\")\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockKnownCredentialRetriever,\n mockDockerCredentialHelperCredentialRetriever,\n mockInferredCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n 
mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n verify(mockCredentialRetrieverFactory).known(knownCredential, \"credentialSource\");\n verify(mockCredentialRetrieverFactory).known(inferredCredential, \"inferredCredentialSource\");\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(\"docker-credential-credentialHelperSuffix\");\n }\n\n @Test\n public void testAsList_credentialHelperPath() throws IOException {\n Path fakeCredentialHelperPath = temporaryFolder.newFile(\"fake-credHelper\").toPath();\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(fakeCredentialHelperPath.toString());\n\n List retrievers = credentialRetrievers.asList();\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(fakeCredentialHelperPath.toString());\n\n Files.delete(fakeCredentialHelperPath);\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex)\n .hasMessageThat()\n .isEqualTo(\"Specified credential helper was not found: \" + fakeCredentialHelperPath);\n }\n\n @Test\n public void testDockerConfigRetrievers_undefinedHome() throws FileNotFoundException {\n List retrievers =\n new DefaultCredentialRetrievers(\n mockCredentialRetrieverFactory, new Properties(), new HashMap<>())\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testDockerConfigRetrievers_noDuplicateRetrievers() throws FileNotFoundException {\n properties.setProperty(\"user.home\", Paths.get(\"/env/home\").toString());\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n 
\"DOCKER_CONFIG\",\n Paths.get(\"/env/home/.docker\").toString());\n retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockEnvHomeXdgCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testCredentialHelper_cmdExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.cmd\").toPath();\n Path pathWithoutCmd = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutCmd.getParent().resolve(\"foo.cmd\"));\n\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString())\n .asList();\n\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n @Test\n public void testCredentialHelper_exeExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.exe\").toPath();\n Path pathWithoutExe = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutExe.getParent().resolve(\"foo.exe\"));\n\n DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString())\n .asList();\n\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n 
mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-plugins-common/src/test/java/com/google/cloud/tools/jib/plugins/common/DefaultCredentialRetrieversTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 29, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "2005dc9dac601488b647062a1255381fc195d7d57d416089a04cb3e6190e37c5"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 1, "content": "package com.google.cloud.tools.jib.plugins.common;\n\nimport static com.google.common.truth.Truth.assertThat;\nimport static com.google.common.truth.Truth8.assertThat;\nimport static org.junit.Assert.assertThrows;\nimport static org.mockito.ArgumentMatchers.anyString;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\n", "meta": {"hash_id": "bb6c3b193144ee26ad7e6e2af856db9b3b9b465d0f875a909acc929b7bd703c2"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 2, "content": "import com.google.cloud.tools.jib.api.Credential;\nimport com.google.cloud.tools.jib.api.CredentialRetriever;\nimport com.google.cloud.tools.jib.frontend.CredentialRetrieverFactory;\nimport com.google.common.collect.ImmutableMap;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n/** Tests for {@link DefaultCredentialRetrievers}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class DefaultCredentialRetrieversTest {\n\n @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n", "meta": {"hash_id": "0d9d045411df5c12b00b7a96fc704cba55d27bce8f2d8f7af0ca1a2f34dec93b"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 3, "content": " @Mock private CredentialRetrieverFactory mockCredentialRetrieverFactory;\n @Mock private CredentialRetriever mockDockerCredentialHelperCredentialRetriever;\n @Mock private CredentialRetriever mockKnownCredentialRetriever;\n @Mock private CredentialRetriever mockInferredCredentialRetriever;\n @Mock private CredentialRetriever mockWellKnownCredentialHelpersCredentialRetriever;\n @Mock private CredentialRetriever mockXdgPrimaryCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeXdgCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockDockerConfigEnvLegacyDockerConfigCredentialRetriever;\n", "meta": {"hash_id": "b9ea067c1474786132dd8d58826d215fb051fee8aa181ad51997a3199427ac64"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 4, "content": " @Mock private CredentialRetriever mockSystemHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockSystemHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeKubernetesDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockEnvHomeLegacyDockerConfigCredentialRetriever;\n @Mock private CredentialRetriever mockApplicationDefaultCredentialRetriever;\n\n", "meta": {"hash_id": "f12335ea57aa85f16e42ac2906afab0af075cd0e7ac85ed2e9b0554cdcfd390a"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 5, "content": " private Properties properties;\n private Map environment;\n\n private final Credential knownCredential = Credential.from(\"username\", \"password\");\n private final Credential inferredCredential = Credential.from(\"username2\", \"password2\");\n\n @Before\n public void setUp() {\n properties = new Properties();\n properties.setProperty(\"os.name\", \"unknown\");\n properties.setProperty(\"user.home\", Paths.get(\"/system/home\").toString());\n environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/docker_config\").toString(),\n \"XDG_RUNTIME_DIR\",\n Paths.get(\"/run/user/1000\").toString(),\n \"XDG_CONFIG_HOME\",\n Paths.get(\"/env/home/.config\").toString());\n\n", "meta": {"hash_id": "965492a5f2fd985bc0ad83356fdddce8ff0d1849274e63ea72abbe5f24bc1b14"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 6, "content": " when(mockCredentialRetrieverFactory.dockerCredentialHelper(anyString()))\n .thenReturn(mockDockerCredentialHelperCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(knownCredential, \"credentialSource\"))\n .thenReturn(mockKnownCredentialRetriever);\n when(mockCredentialRetrieverFactory.known(inferredCredential, \"inferredCredentialSource\"))\n 
.thenReturn(mockInferredCredentialRetriever);\n when(mockCredentialRetrieverFactory.wellKnownCredentialHelpers())\n .thenReturn(mockWellKnownCredentialHelpersCredentialRetriever);\n\n", "meta": {"hash_id": "4c95cb0f3fd04bdcb6958f4a3a066d4b4e1d2030011626d92ca9e8a115c08306"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 7, "content": " when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/run/user/1000/containers/auth.json\")))\n .thenReturn(mockXdgPrimaryCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.config/containers/auth.json\")))\n .thenReturn(mockEnvHomeXdgCredentialRetriever);\n\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.config/containers/auth.json\")))\n .thenReturn(mockSystemHomeXdgCredentialRetriever);\n\n", "meta": {"hash_id": "ea39a9cdc3386887c1d83a1f9534b3f3ae638d4e3e3c126d398a51911f544fee"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 8, "content": " when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/config.json\")))\n .thenReturn(mockDockerConfigEnvDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/docker_config/.dockerconfigjson\")))\n .thenReturn(mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(Paths.get(\"/docker_config/.dockercfg\")))\n .thenReturn(mockDockerConfigEnvLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/system/home/.docker/config.json\")))\n .thenReturn(mockSystemHomeDockerConfigCredentialRetriever);\n", "meta": {"hash_id": "d6132db7cd3c0b6e5aba062bd4a977702ab73ca213fdca18cf69c8fa9d908f96"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 9, "content": " when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/system/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockSystemHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/system/home/.docker/.dockercfg\")))\n .thenReturn(mockSystemHomeLegacyDockerConfigCredentialRetriever);\n", "meta": {"hash_id": "19651a57b947f914ced45377d46eeed9a1e4ad9b13a0779180ccb684748cdf59"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 10, "content": " when(mockCredentialRetrieverFactory.dockerConfig(Paths.get(\"/env/home/.docker/config.json\")))\n .thenReturn(mockEnvHomeDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.dockerConfig(\n Paths.get(\"/env/home/.docker/.dockerconfigjson\")))\n .thenReturn(mockEnvHomeKubernetesDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.legacyDockerConfig(\n Paths.get(\"/env/home/.docker/.dockercfg\")))\n .thenReturn(mockEnvHomeLegacyDockerConfigCredentialRetriever);\n when(mockCredentialRetrieverFactory.googleApplicationDefaultCredentials())\n .thenReturn(mockApplicationDefaultCredentialRetriever);\n }\n\n", "meta": {"hash_id": "bb0b7bda9e5d7c89531635d2af732075fedbc0b140e5860d25a72fbc911dff61"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 11, "content": " @Test\n public void testAsList() throws FileNotFoundException {\n List retriever =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n 
assertThat(retriever)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n", "meta": {"hash_id": "12a9e35d06cd91fbc89418844451a6e210c81e70f0a257758bcd53ffe6a92d74"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 12, "content": " mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "meta": {"hash_id": "315fa52f304b785f319d7217db56cbdb420ca8b13b6e4c995814fa3f1d2e8f69"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 13, "content": " @Test\n public void testAsList_all() throws FileNotFoundException {\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setKnownCredential(knownCredential, \"credentialSource\")\n .setInferredCredential(inferredCredential, \"inferredCredentialSource\")\n .setCredentialHelper(\"credentialHelperSuffix\")\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockKnownCredentialRetriever,\n mockDockerCredentialHelperCredentialRetriever,\n mockInferredCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n", "meta": {"hash_id": "50125214400c0ea1df7ab9764fa6c35288de87a85e32c2e515e1b13839e12cab"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 14, "content": " mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n", "meta": {"hash_id": "f46a5cae20e5306eaa61725dd03529c1f762ab2077bad5af1cd1702286853ff1"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 15, "content": " verify(mockCredentialRetrieverFactory).known(knownCredential, \"credentialSource\");\n verify(mockCredentialRetrieverFactory).known(inferredCredential, \"inferredCredentialSource\");\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(\"docker-credential-credentialHelperSuffix\");\n }\n\n", "meta": {"hash_id": "ba9faca0cbb89f4d838491f6c2013f1bcfb2e825e3ceebde43f0bc1a45f41bc3"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 16, "content": " @Test\n public void testAsList_credentialHelperPath() throws IOException {\n Path fakeCredentialHelperPath = temporaryFolder.newFile(\"fake-credHelper\").toPath();\n DefaultCredentialRetrievers credentialRetrievers =\n 
new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(fakeCredentialHelperPath.toString());\n\n", "meta": {"hash_id": "5fee3f00c557c6ba06f33ad916f53823799d9aa419bc0027366e670f5f804a6a"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 17, "content": " List retrievers = credentialRetrievers.asList();\n assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n", "meta": {"hash_id": "35ac8c2dbc37c612f750a22a86c8257039063d524e7773315c8056adeaf59a34"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 18, "content": " mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(fakeCredentialHelperPath.toString());\n\n", "meta": {"hash_id": "5c6d342cf45b768601ae025cf45a78b70666d6dcefefa1c17fa4a2ade1305ba0"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 19, "content": " Files.delete(fakeCredentialHelperPath);\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex)\n .hasMessageThat()\n .isEqualTo(\"Specified credential helper was not found: \" + fakeCredentialHelperPath);\n }\n\n @Test\n public void testDockerConfigRetrievers_undefinedHome() throws FileNotFoundException {\n List retrievers =\n new DefaultCredentialRetrievers(\n mockCredentialRetrieverFactory, new Properties(), new HashMap<>())\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "meta": {"hash_id": "03aa0e2e77899856d8f0767dad3ef33a22ecaf35e50b88cc83e16307011556e1"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 20, "content": " @Test\n public void testDockerConfigRetrievers_noDuplicateRetrievers() throws FileNotFoundException {\n properties.setProperty(\"user.home\", Paths.get(\"/env/home\").toString());\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n", "meta": {"hash_id": "f1067ed553a8825d92d16e47aa7ef1a199387dfabb6623157d50c31af122e711"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 21, "content": " mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n 
mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n\n", "meta": {"hash_id": "1fd09b33b29b958bb191599bd93420f960326e2ed4d78093cf429f75ee45f8ee"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 22, "content": " environment =\n ImmutableMap.of(\n \"HOME\",\n Paths.get(\"/env/home\").toString(),\n \"DOCKER_CONFIG\",\n Paths.get(\"/env/home/.docker\").toString());\n retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockEnvHomeXdgCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "meta": {"hash_id": "4a3ec59aec48d00bfa9257b6253641ce187d03d9452335640ddfd9a6481c0753"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 23, "content": " @Test\n public void testCredentialHelper_cmdExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.cmd\").toPath();\n Path pathWithoutCmd = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutCmd.getParent().resolve(\"foo.cmd\"));\n\n", "meta": {"hash_id": "c2012ec081c7a07f856b39475dd4c17f874a7b697d4b843e1c289d1fcf4a042a"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 24, "content": " DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutCmd.toString())\n .asList();\n\n", "meta": {"hash_id": "a1800a8d7fc791dc1b84353e6c9a97970de9e4ea43c45f8b278f9d74954ac485"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 25, "content": " assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n\n", "meta": {"hash_id": "ecdf3531c7967e2f8a9c25e32ffb5310ba0fe5403e1186661bb5fd7300ded319"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 26, "content": " @Test\n public void 
testCredentialHelper_exeExtension() throws IOException {\n Path credHelper = temporaryFolder.newFile(\"foo.exe\").toPath();\n Path pathWithoutExe = credHelper.getParent().resolve(\"foo\");\n assertThat(credHelper).isEqualTo(pathWithoutExe.getParent().resolve(\"foo.exe\"));\n\n", "meta": {"hash_id": "adde1b937a648a858e3e7ccefa1f5124bfd6b02f750e062428a87831387ac3fb"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 27, "content": " DefaultCredentialRetrievers credentialRetrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString());\n Exception ex = assertThrows(FileNotFoundException.class, credentialRetrievers::asList);\n assertThat(ex).hasMessageThat().startsWith(\"Specified credential helper was not found:\");\n assertThat(ex).hasMessageThat().endsWith(\"foo\");\n\n properties.setProperty(\"os.name\", \"winDOWs\");\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .setCredentialHelper(pathWithoutExe.toString())\n .asList();\n\n", "meta": {"hash_id": "6df52f2ced9f42c4552ce9b342dd81a5ad408d8d32754ef68f1d69ee45998efe"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 28, "content": " assertThat(retrievers)\n .containsExactly(\n mockDockerCredentialHelperCredentialRetriever,\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockSystemHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n mockApplicationDefaultCredentialRetriever)\n .inOrder();\n }\n}\n", "meta": {"hash_id": "2c9a2cd683ab45b6ae2e27c536adb694c2c35006f2872624474f320db21b9e97"}}]}], "golden_chunks": [{"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 20, "content": " @Test\n public void testDockerConfigRetrievers_noDuplicateRetrievers() throws FileNotFoundException {\n properties.setProperty(\"user.home\", Paths.get(\"/env/home\").toString());\n List retrievers =\n new DefaultCredentialRetrievers(mockCredentialRetrieverFactory, properties, environment)\n .asList();\n assertThat(retrievers)\n .containsExactly(\n mockXdgPrimaryCredentialRetriever,\n mockEnvHomeXdgCredentialRetriever,\n mockDockerConfigEnvDockerConfigCredentialRetriever,\n", "meta": {"hash_id": "f1067ed553a8825d92d16e47aa7ef1a199387dfabb6623157d50c31af122e711"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 18, "content": " mockDockerConfigEnvKubernetesDockerConfigCredentialRetriever,\n mockDockerConfigEnvLegacyDockerConfigCredentialRetriever,\n mockSystemHomeDockerConfigCredentialRetriever,\n mockSystemHomeKubernetesDockerConfigCredentialRetriever,\n mockSystemHomeLegacyDockerConfigCredentialRetriever,\n mockEnvHomeDockerConfigCredentialRetriever,\n mockEnvHomeKubernetesDockerConfigCredentialRetriever,\n mockEnvHomeLegacyDockerConfigCredentialRetriever,\n mockWellKnownCredentialHelpersCredentialRetriever,\n 
mockApplicationDefaultCredentialRetriever)\n .inOrder();\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(fakeCredentialHelperPath.toString());\n\n", "meta": {"hash_id": "5c6d342cf45b768601ae025cf45a78b70666d6dcefefa1c17fa4a2ade1305ba0"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 15, "content": " verify(mockCredentialRetrieverFactory).known(knownCredential, \"credentialSource\");\n verify(mockCredentialRetrieverFactory).known(inferredCredential, \"inferredCredentialSource\");\n verify(mockCredentialRetrieverFactory)\n .dockerCredentialHelper(\"docker-credential-credentialHelperSuffix\");\n }\n\n", "meta": {"hash_id": "ba9faca0cbb89f4d838491f6c2013f1bcfb2e825e3ceebde43f0bc1a45f41bc3"}}, {"doc_uuid": "f8dd24de0db395aa7cff1e4c804eb1d10f916c9b4765d7c790b1dedfa339f913", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "2005dc9dac601488b647062a1255381fc195d7d57d416089a04cb3e6190e37c5"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-plugins-common/src/test/java/com/google/cloud/tools/jib/plugins/common/DefaultCredentialRetrieversTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 29, "source": "codebases/jsonl"}} +{"query": "What does the `ReproducibleImageTest` test class verify?", "answer": "According to the class Javadoc comment, `ReproducibleImageTest` verifies that the created image has explicit directory structures, default timestamps, permissions, and file orderings:\n```java\n/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest { ... }\n```", "golden_doc_uuids": ["bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee"], "golden_chunk_uuids": [["bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", 3]], "golden_documents": [{"uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "content": "/*\n * Copyright 2019 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.api;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.api.buildplan.AbsoluteUnixPath;\nimport com.google.cloud.tools.jib.api.buildplan.FileEntriesLayer;\nimport com.google.cloud.tools.jib.api.buildplan.FilePermissions;\nimport com.google.common.collect.ArrayListMultimap;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.Multimap;\nimport com.google.common.io.CharStreams;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.time.Instant;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport java.util.function.BiConsumer;\nimport java.util.zip.GZIPInputStream;\nimport org.apache.commons.compress.archivers.tar.TarArchiveEntry;\nimport org.apache.commons.compress.archivers.tar.TarArchiveInputStream;\nimport org.junit.BeforeClass;\nimport org.junit.ClassRule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\n/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n @ClassRule public static final TemporaryFolder imageLocation = new TemporaryFolder();\n\n private static File imageTar;\n\n @BeforeClass\n public static void createImage()\n throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n IOException, RegistryException, ExecutionException {\n\n Path root = imageLocation.getRoot().toPath();\n Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n Path fileB = Files.createFile(root.resolve(\"fileB.txt\"));\n Path fileC = Files.createFile(root.resolve(\"fileC.txt\"));\n Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n Path subsubdir = Files.createDirectory(subdir.resolve(\"subdir\"));\n Files.createFile(subdir.resolve(\"fileD.txt\"));\n Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n imageTar = new File(imageLocation.getRoot(), \"image.tar\");\n Containerizer containerizer =\n Containerizer.to(TarImage.at(imageTar.toPath()).named(\"jib-core/reproducible\"));\n\n Jib.fromScratch()\n .setEntrypoint(\"echo\", \"Hello World\")\n .addLayer(ImmutableList.of(fileA), AbsoluteUnixPath.get(\"/app\"))\n // layer with out-of-order files\n .addLayer(ImmutableList.of(fileC, fileB), \"/app\")\n .addFileEntriesLayer(\n FileEntriesLayer.builder()\n .addEntryRecursive(subdir, AbsoluteUnixPath.get(\"/app\"))\n .build())\n .containerize(containerizer);\n }\n\n @Test\n public void testTarballStructure() throws IOException {\n // known content should produce known results\n List actual = new ArrayList<>();\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n actual.add(imageEntry.getName());\n }\n }\n\n assertThat(actual)\n .containsExactly(\n \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n 
\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n \"config.json\",\n \"manifest.json\")\n .inOrder();\n }\n\n @Test\n public void testManifest() throws IOException {\n String expectedManifest =\n \"[{\\\"Config\\\":\\\"config.json\\\",\\\"RepoTags\\\":[\\\"jib-core/reproducible:latest\\\"],\"\n + \"\\\"Layers\\\":[\\\"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\\\",\\\"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\\\",\\\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\\\"]}]\";\n String generatedManifest = extractFromTarFileAsString(imageTar, \"manifest.json\");\n assertThat(generatedManifest).isEqualTo(expectedManifest);\n }\n\n @Test\n public void testConfiguration() throws IOException {\n String expectedConfig =\n \"{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\"\n + \"\\\"config\\\":{\\\"Env\\\":[],\\\"Entrypoint\\\":[\\\"echo\\\",\\\"Hello World\\\"],\\\"ExposedPorts\\\":{},\\\"Labels\\\":{},\\\"Volumes\\\":{}},\"\n + \"\\\"history\\\":[{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"}],\"\n + \"\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:18e4f44e6d1835bd968339b166057bd17ab7d4cbb56dc7262a5cafea7cf8d405\\\",\\\"sha256:13369c34f073f2b9c1fa6431e23d925f1a8eac65b1726c8cc8fcc2596c69b414\\\",\\\"sha256:4f92c507112d7880ca0f504ef8272b7fdee107263270125036a260a741565923\\\"]}}\";\n String generatedConfig = extractFromTarFileAsString(imageTar, \"config.json\");\n assertThat(generatedConfig).isEqualTo(expectedConfig);\n }\n\n @Test\n public void testImageLayout() throws IOException {\n Set paths = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n paths.add(layerEntry.getName());\n }\n });\n assertThat(paths)\n .containsExactly(\n \"app/fileA.txt\",\n \"app/fileB.txt\",\n \"app/fileC.txt\",\n \"app/fileD.txt\",\n \"app/subdir/fileE.txt\");\n }\n\n @Test\n public void testAllFileAndDirectories() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) ->\n assertThat(layerEntry.isFile() || layerEntry.isDirectory()).isTrue());\n }\n\n @Test\n public void testTimestampsEpochPlus1s() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) -> {\n Instant modificationTime = layerEntry.getLastModifiedDate().toInstant();\n assertThat(modificationTime).isEqualTo(Instant.ofEpochSecond(1));\n });\n }\n\n @Test\n public void testPermissions() throws IOException {\n assertThat(FilePermissions.DEFAULT_FILE_PERMISSIONS.getPermissionBits()).isEqualTo(0644);\n assertThat(FilePermissions.DEFAULT_FOLDER_PERMISSIONS.getPermissionBits()).isEqualTo(0755);\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0644);\n } else if (layerEntry.isDirectory()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0755);\n }\n });\n }\n\n @Test\n public void testNoImplicitParentDirectories() throws IOException {\n Set directories = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n String entryPath = 
layerEntry.getName();\n if (layerEntry.isDirectory()) {\n assertThat(entryPath.endsWith(\"/\")).isTrue();\n entryPath = entryPath.substring(0, entryPath.length() - 1);\n }\n\n int lastSlashPosition = entryPath.lastIndexOf('/');\n String parent = entryPath.substring(0, Math.max(0, lastSlashPosition));\n if (!parent.isEmpty()) {\n assertThat(directories.contains(parent)).isTrue();\n }\n if (layerEntry.isDirectory()) {\n directories.add(entryPath);\n }\n });\n }\n\n @Test\n public void testFileOrdering() throws IOException {\n Multimap layerPaths = ArrayListMultimap.create();\n layerEntriesDo((layerName, layerEntry) -> layerPaths.put(layerName, layerEntry.getName()));\n for (Collection paths : layerPaths.asMap().values()) {\n List sorted = new ArrayList<>(paths);\n // ReproducibleLayerBuilder sorts by TarArchiveEntry::getName()\n Collections.sort(sorted);\n assertThat(paths).containsExactlyElementsIn(sorted).inOrder();\n }\n }\n\n private void layerEntriesDo(BiConsumer layerConsumer)\n throws IOException {\n\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n String imageEntryName = imageEntry.getName();\n // assume all .tar.gz files are layers\n if (imageEntry.isFile() && imageEntryName.endsWith(\".tar.gz\")) {\n @SuppressWarnings(\"resource\") // must not close sub-streams\n TarArchiveInputStream layer = new TarArchiveInputStream(new GZIPInputStream(input));\n TarArchiveEntry layerEntry;\n while ((layerEntry = layer.getNextTarEntry()) != null) {\n layerConsumer.accept(imageEntryName, layerEntry);\n }\n }\n }\n }\n }\n\n private static String extractFromTarFileAsString(File tarFile, String filename)\n throws IOException {\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(tarFile.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n if (filename.equals(imageEntry.getName())) {\n return CharStreams.toString(new InputStreamReader(input, StandardCharsets.UTF_8));\n }\n }\n }\n throw new AssertionError(\"file not found: \" + filename);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/integration-test/java/com/google/cloud/tools/jib/api/ReproducibleImageTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 15, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 0, "content": "/*\n * Copyright 2019 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "2d1e13dcf480a643752d3b3954afdfd2ad4151edbf6e4129b792a5e0c58724e8"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 1, "content": "package com.google.cloud.tools.jib.api;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.api.buildplan.AbsoluteUnixPath;\nimport com.google.cloud.tools.jib.api.buildplan.FileEntriesLayer;\nimport com.google.cloud.tools.jib.api.buildplan.FilePermissions;\nimport com.google.common.collect.ArrayListMultimap;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.Multimap;\nimport com.google.common.io.CharStreams;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\n", "meta": {"hash_id": "7441c460196213a9cfc800d7fa2e3a142047244dc769bf952a72062610d38e40"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 2, "content": "import java.nio.file.Path;\nimport java.time.Instant;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport java.util.function.BiConsumer;\nimport java.util.zip.GZIPInputStream;\nimport org.apache.commons.compress.archivers.tar.TarArchiveEntry;\nimport org.apache.commons.compress.archivers.tar.TarArchiveInputStream;\nimport org.junit.BeforeClass;\nimport org.junit.ClassRule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\n", "meta": {"hash_id": "ceab2a104829864afd7c7713404be4b29a3be8e3868d00c689d1daf34b8b140a"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 3, "content": "/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n @ClassRule public static final TemporaryFolder imageLocation = new TemporaryFolder();\n\n private static File imageTar;\n\n @BeforeClass\n public static void createImage()\n throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n IOException, RegistryException, ExecutionException {\n\n Path root = imageLocation.getRoot().toPath();\n Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n Path fileB = Files.createFile(root.resolve(\"fileB.txt\"));\n Path fileC = Files.createFile(root.resolve(\"fileC.txt\"));\n Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n Path subsubdir = Files.createDirectory(subdir.resolve(\"subdir\"));\n Files.createFile(subdir.resolve(\"fileD.txt\"));\n Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n", "meta": {"hash_id": "995ea5efd087b5ba76509c7f959484d0c63cda3e86bb036f87edcbbb63d7462d"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 4, "content": " imageTar = new File(imageLocation.getRoot(), \"image.tar\");\n Containerizer containerizer =\n Containerizer.to(TarImage.at(imageTar.toPath()).named(\"jib-core/reproducible\"));\n\n Jib.fromScratch()\n .setEntrypoint(\"echo\", \"Hello World\")\n .addLayer(ImmutableList.of(fileA), AbsoluteUnixPath.get(\"/app\"))\n // layer with out-of-order files\n .addLayer(ImmutableList.of(fileC, fileB), \"/app\")\n 
.addFileEntriesLayer(\n FileEntriesLayer.builder()\n .addEntryRecursive(subdir, AbsoluteUnixPath.get(\"/app\"))\n .build())\n .containerize(containerizer);\n }\n\n", "meta": {"hash_id": "46706c9cde149ce8dea9155cb50c022a3354087c38ea72250ee08db1e0a21b58"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 5, "content": " @Test\n public void testTarballStructure() throws IOException {\n // known content should produce known results\n List actual = new ArrayList<>();\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n actual.add(imageEntry.getName());\n }\n }\n\n assertThat(actual)\n .containsExactly(\n \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n \"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n \"config.json\",\n \"manifest.json\")\n .inOrder();\n }\n\n", "meta": {"hash_id": "8b029daabb3dd4c353c54e4a4f05f43865e1775199aa6d2faece7d3e7a165b71"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 6, "content": " @Test\n public void testManifest() throws IOException {\n String expectedManifest =\n \"[{\\\"Config\\\":\\\"config.json\\\",\\\"RepoTags\\\":[\\\"jib-core/reproducible:latest\\\"],\"\n + \"\\\"Layers\\\":[\\\"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\\\",\\\"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\\\",\\\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\\\"]}]\";\n String generatedManifest = extractFromTarFileAsString(imageTar, \"manifest.json\");\n assertThat(generatedManifest).isEqualTo(expectedManifest);\n }\n\n", "meta": {"hash_id": "91b6f133d3e10b844f7b33a1129a7341065e447898b088968569c666fb0bf9aa"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 7, "content": " @Test\n public void testConfiguration() throws IOException {\n String expectedConfig =\n \"{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\"\n + \"\\\"config\\\":{\\\"Env\\\":[],\\\"Entrypoint\\\":[\\\"echo\\\",\\\"Hello World\\\"],\\\"ExposedPorts\\\":{},\\\"Labels\\\":{},\\\"Volumes\\\":{}},\"\n + \"\\\"history\\\":[{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"}],\"\n", "meta": {"hash_id": "fa188fe7c1ff1a0cd34d5fe366f51756f137c9d50deaf5de97472c4813c242d5"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 8, "content": " + \"\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:18e4f44e6d1835bd968339b166057bd17ab7d4cbb56dc7262a5cafea7cf8d405\\\",\\\"sha256:13369c34f073f2b9c1fa6431e23d925f1a8eac65b1726c8cc8fcc2596c69b414\\\",\\\"sha256:4f92c507112d7880ca0f504ef8272b7fdee107263270125036a260a741565923\\\"]}}\";\n String generatedConfig = extractFromTarFileAsString(imageTar, \"config.json\");\n assertThat(generatedConfig).isEqualTo(expectedConfig);\n 
}\n\n @Test\n public void testImageLayout() throws IOException {\n Set paths = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n paths.add(layerEntry.getName());\n }\n });\n assertThat(paths)\n .containsExactly(\n \"app/fileA.txt\",\n \"app/fileB.txt\",\n \"app/fileC.txt\",\n \"app/fileD.txt\",\n \"app/subdir/fileE.txt\");\n }\n\n", "meta": {"hash_id": "1bae4e592af7a63228425e47c4e3e56667083b95551cc00c69074d33bb82e9b5"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 9, "content": " @Test\n public void testAllFileAndDirectories() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) ->\n assertThat(layerEntry.isFile() || layerEntry.isDirectory()).isTrue());\n }\n\n @Test\n public void testTimestampsEpochPlus1s() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) -> {\n Instant modificationTime = layerEntry.getLastModifiedDate().toInstant();\n assertThat(modificationTime).isEqualTo(Instant.ofEpochSecond(1));\n });\n }\n\n", "meta": {"hash_id": "3d86382d60c875f500e6179c141de84887097b87fc0234bce77dcaa6bf25eec1"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 10, "content": " @Test\n public void testPermissions() throws IOException {\n assertThat(FilePermissions.DEFAULT_FILE_PERMISSIONS.getPermissionBits()).isEqualTo(0644);\n assertThat(FilePermissions.DEFAULT_FOLDER_PERMISSIONS.getPermissionBits()).isEqualTo(0755);\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0644);\n } else if (layerEntry.isDirectory()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0755);\n }\n });\n }\n\n", "meta": {"hash_id": "e44f42996501818737c06a785d168301b1cc9d4ceb62461c5da49aef91cb43ca"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 11, "content": " @Test\n public void testNoImplicitParentDirectories() throws IOException {\n Set directories = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n String entryPath = layerEntry.getName();\n if (layerEntry.isDirectory()) {\n assertThat(entryPath.endsWith(\"/\")).isTrue();\n entryPath = entryPath.substring(0, entryPath.length() - 1);\n }\n\n int lastSlashPosition = entryPath.lastIndexOf('/');\n String parent = entryPath.substring(0, Math.max(0, lastSlashPosition));\n if (!parent.isEmpty()) {\n assertThat(directories.contains(parent)).isTrue();\n }\n if (layerEntry.isDirectory()) {\n directories.add(entryPath);\n }\n });\n }\n\n", "meta": {"hash_id": "5c7f2c55312af527e75eb585d5e8367694742546b1d325f8c4c6e85f7e20a578"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 12, "content": " @Test\n public void testFileOrdering() throws IOException {\n Multimap layerPaths = ArrayListMultimap.create();\n layerEntriesDo((layerName, layerEntry) -> layerPaths.put(layerName, layerEntry.getName()));\n for (Collection paths : layerPaths.asMap().values()) {\n List sorted = new ArrayList<>(paths);\n // ReproducibleLayerBuilder sorts by TarArchiveEntry::getName()\n Collections.sort(sorted);\n assertThat(paths).containsExactlyElementsIn(sorted).inOrder();\n }\n }\n\n private void layerEntriesDo(BiConsumer layerConsumer)\n throws IOException {\n\n", "meta": {"hash_id": "0c814790c1fa239d965e7be9c7ee0d0ecb97e221141c91887ed4e98b0b4f843e"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 13, "content": " try 
(TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n String imageEntryName = imageEntry.getName();\n // assume all .tar.gz files are layers\n if (imageEntry.isFile() && imageEntryName.endsWith(\".tar.gz\")) {\n @SuppressWarnings(\"resource\") // must not close sub-streams\n TarArchiveInputStream layer = new TarArchiveInputStream(new GZIPInputStream(input));\n TarArchiveEntry layerEntry;\n while ((layerEntry = layer.getNextTarEntry()) != null) {\n layerConsumer.accept(imageEntryName, layerEntry);\n }\n }\n }\n }\n }\n\n", "meta": {"hash_id": "ae852a40ebdd8ad26fa422ae8173a6068e80014007a8018277eed5b8e289327c"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 14, "content": " private static String extractFromTarFileAsString(File tarFile, String filename)\n throws IOException {\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(tarFile.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n if (filename.equals(imageEntry.getName())) {\n return CharStreams.toString(new InputStreamReader(input, StandardCharsets.UTF_8));\n }\n }\n }\n throw new AssertionError(\"file not found: \" + filename);\n }\n}\n", "meta": {"hash_id": "99912deb7335238dedadcdf3cbc33f14ad2c49beedfe049bae9316f8c017dd6e"}}]}], "golden_chunks": [{"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 3, "content": "/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n @ClassRule public static final TemporaryFolder imageLocation = new TemporaryFolder();\n\n private static File imageTar;\n\n @BeforeClass\n public static void createImage()\n throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n IOException, RegistryException, ExecutionException {\n\n Path root = imageLocation.getRoot().toPath();\n Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n Path fileB = Files.createFile(root.resolve(\"fileB.txt\"));\n Path fileC = Files.createFile(root.resolve(\"fileC.txt\"));\n Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n Path subsubdir = Files.createDirectory(subdir.resolve(\"subdir\"));\n Files.createFile(subdir.resolve(\"fileD.txt\"));\n Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n", "meta": {"hash_id": "995ea5efd087b5ba76509c7f959484d0c63cda3e86bb036f87edcbbb63d7462d"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/integration-test/java/com/google/cloud/tools/jib/api/ReproducibleImageTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 15, "source": "codebases/jsonl"}} +{"query": "How does the `createImage()` method create the test image?", "answer": "The `createImage()` method performs the following steps to create the test image:\n1. Creates a temporary directory using `TemporaryFolder` and creates several test files and directories within it.\n2. Configures a `Containerizer` to build a tarball image named \"jib-core/reproducible\" using `TarImage`.\n3. 
Builds a container image using `Jib.fromScratch()` by setting the entrypoint, adding layers with the test files, and containerizing with the configured `Containerizer`.\n\nContainerizer containerizer =\n Containerizer.to(TarImage.at(imageTar.toPath()).named(\"jib-core/reproducible\"));\n\nJib.fromScratch()\n .setEntrypoint(\"echo\", \"Hello World\")\n .addLayer(ImmutableList.of(fileA), AbsoluteUnixPath.get(\"/app\"))\n // layer with out-of-order files\n .addLayer(ImmutableList.of(fileC, fileB), \"/app\")\n .addFileEntriesLayer(\n FileEntriesLayer.builder()\n .addEntryRecursive(subdir, AbsoluteUnixPath.get(\"/app\"))\n .build())\n .containerize(containerizer);\n", "golden_doc_uuids": ["bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee"], "golden_chunk_uuids": [["bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", 3]], "golden_documents": [{"uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "content": "/*\n * Copyright 2019 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.api;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.api.buildplan.AbsoluteUnixPath;\nimport com.google.cloud.tools.jib.api.buildplan.FileEntriesLayer;\nimport com.google.cloud.tools.jib.api.buildplan.FilePermissions;\nimport com.google.common.collect.ArrayListMultimap;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.Multimap;\nimport com.google.common.io.CharStreams;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.time.Instant;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport java.util.function.BiConsumer;\nimport java.util.zip.GZIPInputStream;\nimport org.apache.commons.compress.archivers.tar.TarArchiveEntry;\nimport org.apache.commons.compress.archivers.tar.TarArchiveInputStream;\nimport org.junit.BeforeClass;\nimport org.junit.ClassRule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\n/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n @ClassRule public static final TemporaryFolder imageLocation = new TemporaryFolder();\n\n private static File imageTar;\n\n @BeforeClass\n public static void createImage()\n throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n IOException, RegistryException, ExecutionException {\n\n Path root = imageLocation.getRoot().toPath();\n Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n Path fileB = Files.createFile(root.resolve(\"fileB.txt\"));\n Path fileC = 
Files.createFile(root.resolve(\"fileC.txt\"));\n Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n Path subsubdir = Files.createDirectory(subdir.resolve(\"subdir\"));\n Files.createFile(subdir.resolve(\"fileD.txt\"));\n Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n imageTar = new File(imageLocation.getRoot(), \"image.tar\");\n Containerizer containerizer =\n Containerizer.to(TarImage.at(imageTar.toPath()).named(\"jib-core/reproducible\"));\n\n Jib.fromScratch()\n .setEntrypoint(\"echo\", \"Hello World\")\n .addLayer(ImmutableList.of(fileA), AbsoluteUnixPath.get(\"/app\"))\n // layer with out-of-order files\n .addLayer(ImmutableList.of(fileC, fileB), \"/app\")\n .addFileEntriesLayer(\n FileEntriesLayer.builder()\n .addEntryRecursive(subdir, AbsoluteUnixPath.get(\"/app\"))\n .build())\n .containerize(containerizer);\n }\n\n @Test\n public void testTarballStructure() throws IOException {\n // known content should produce known results\n List actual = new ArrayList<>();\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n actual.add(imageEntry.getName());\n }\n }\n\n assertThat(actual)\n .containsExactly(\n \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n \"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n \"config.json\",\n \"manifest.json\")\n .inOrder();\n }\n\n @Test\n public void testManifest() throws IOException {\n String expectedManifest =\n \"[{\\\"Config\\\":\\\"config.json\\\",\\\"RepoTags\\\":[\\\"jib-core/reproducible:latest\\\"],\"\n + \"\\\"Layers\\\":[\\\"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\\\",\\\"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\\\",\\\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\\\"]}]\";\n String generatedManifest = extractFromTarFileAsString(imageTar, \"manifest.json\");\n assertThat(generatedManifest).isEqualTo(expectedManifest);\n }\n\n @Test\n public void testConfiguration() throws IOException {\n String expectedConfig =\n \"{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\"\n + \"\\\"config\\\":{\\\"Env\\\":[],\\\"Entrypoint\\\":[\\\"echo\\\",\\\"Hello World\\\"],\\\"ExposedPorts\\\":{},\\\"Labels\\\":{},\\\"Volumes\\\":{}},\"\n + \"\\\"history\\\":[{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"}],\"\n + \"\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:18e4f44e6d1835bd968339b166057bd17ab7d4cbb56dc7262a5cafea7cf8d405\\\",\\\"sha256:13369c34f073f2b9c1fa6431e23d925f1a8eac65b1726c8cc8fcc2596c69b414\\\",\\\"sha256:4f92c507112d7880ca0f504ef8272b7fdee107263270125036a260a741565923\\\"]}}\";\n String generatedConfig = extractFromTarFileAsString(imageTar, \"config.json\");\n assertThat(generatedConfig).isEqualTo(expectedConfig);\n }\n\n @Test\n public void testImageLayout() throws IOException {\n Set paths = new HashSet<>();\n 
layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n paths.add(layerEntry.getName());\n }\n });\n assertThat(paths)\n .containsExactly(\n \"app/fileA.txt\",\n \"app/fileB.txt\",\n \"app/fileC.txt\",\n \"app/fileD.txt\",\n \"app/subdir/fileE.txt\");\n }\n\n @Test\n public void testAllFileAndDirectories() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) ->\n assertThat(layerEntry.isFile() || layerEntry.isDirectory()).isTrue());\n }\n\n @Test\n public void testTimestampsEpochPlus1s() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) -> {\n Instant modificationTime = layerEntry.getLastModifiedDate().toInstant();\n assertThat(modificationTime).isEqualTo(Instant.ofEpochSecond(1));\n });\n }\n\n @Test\n public void testPermissions() throws IOException {\n assertThat(FilePermissions.DEFAULT_FILE_PERMISSIONS.getPermissionBits()).isEqualTo(0644);\n assertThat(FilePermissions.DEFAULT_FOLDER_PERMISSIONS.getPermissionBits()).isEqualTo(0755);\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0644);\n } else if (layerEntry.isDirectory()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0755);\n }\n });\n }\n\n @Test\n public void testNoImplicitParentDirectories() throws IOException {\n Set directories = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n String entryPath = layerEntry.getName();\n if (layerEntry.isDirectory()) {\n assertThat(entryPath.endsWith(\"/\")).isTrue();\n entryPath = entryPath.substring(0, entryPath.length() - 1);\n }\n\n int lastSlashPosition = entryPath.lastIndexOf('/');\n String parent = entryPath.substring(0, Math.max(0, lastSlashPosition));\n if (!parent.isEmpty()) {\n assertThat(directories.contains(parent)).isTrue();\n }\n if (layerEntry.isDirectory()) {\n directories.add(entryPath);\n }\n });\n }\n\n @Test\n public void testFileOrdering() throws IOException {\n Multimap layerPaths = ArrayListMultimap.create();\n layerEntriesDo((layerName, layerEntry) -> layerPaths.put(layerName, layerEntry.getName()));\n for (Collection paths : layerPaths.asMap().values()) {\n List sorted = new ArrayList<>(paths);\n // ReproducibleLayerBuilder sorts by TarArchiveEntry::getName()\n Collections.sort(sorted);\n assertThat(paths).containsExactlyElementsIn(sorted).inOrder();\n }\n }\n\n private void layerEntriesDo(BiConsumer layerConsumer)\n throws IOException {\n\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n String imageEntryName = imageEntry.getName();\n // assume all .tar.gz files are layers\n if (imageEntry.isFile() && imageEntryName.endsWith(\".tar.gz\")) {\n @SuppressWarnings(\"resource\") // must not close sub-streams\n TarArchiveInputStream layer = new TarArchiveInputStream(new GZIPInputStream(input));\n TarArchiveEntry layerEntry;\n while ((layerEntry = layer.getNextTarEntry()) != null) {\n layerConsumer.accept(imageEntryName, layerEntry);\n }\n }\n }\n }\n }\n\n private static String extractFromTarFileAsString(File tarFile, String filename)\n throws IOException {\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(tarFile.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n if (filename.equals(imageEntry.getName())) {\n return CharStreams.toString(new InputStreamReader(input, 
StandardCharsets.UTF_8));\n }\n }\n }\n throw new AssertionError(\"file not found: \" + filename);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/integration-test/java/com/google/cloud/tools/jib/api/ReproducibleImageTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 15, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 0, "content": "/*\n * Copyright 2019 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "2d1e13dcf480a643752d3b3954afdfd2ad4151edbf6e4129b792a5e0c58724e8"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 1, "content": "package com.google.cloud.tools.jib.api;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.api.buildplan.AbsoluteUnixPath;\nimport com.google.cloud.tools.jib.api.buildplan.FileEntriesLayer;\nimport com.google.cloud.tools.jib.api.buildplan.FilePermissions;\nimport com.google.common.collect.ArrayListMultimap;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.Multimap;\nimport com.google.common.io.CharStreams;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\n", "meta": {"hash_id": "7441c460196213a9cfc800d7fa2e3a142047244dc769bf952a72062610d38e40"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 2, "content": "import java.nio.file.Path;\nimport java.time.Instant;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport java.util.function.BiConsumer;\nimport java.util.zip.GZIPInputStream;\nimport org.apache.commons.compress.archivers.tar.TarArchiveEntry;\nimport org.apache.commons.compress.archivers.tar.TarArchiveInputStream;\nimport org.junit.BeforeClass;\nimport org.junit.ClassRule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\n", "meta": {"hash_id": "ceab2a104829864afd7c7713404be4b29a3be8e3868d00c689d1daf34b8b140a"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 3, "content": "/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n @ClassRule public static final TemporaryFolder imageLocation = new TemporaryFolder();\n\n private static File imageTar;\n\n @BeforeClass\n public static void createImage()\n throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n IOException, RegistryException, ExecutionException {\n\n Path root = imageLocation.getRoot().toPath();\n Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n Path fileB = 
Files.createFile(root.resolve(\"fileB.txt\"));\n Path fileC = Files.createFile(root.resolve(\"fileC.txt\"));\n Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n Path subsubdir = Files.createDirectory(subdir.resolve(\"subdir\"));\n Files.createFile(subdir.resolve(\"fileD.txt\"));\n Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n", "meta": {"hash_id": "995ea5efd087b5ba76509c7f959484d0c63cda3e86bb036f87edcbbb63d7462d"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 4, "content": " imageTar = new File(imageLocation.getRoot(), \"image.tar\");\n Containerizer containerizer =\n Containerizer.to(TarImage.at(imageTar.toPath()).named(\"jib-core/reproducible\"));\n\n Jib.fromScratch()\n .setEntrypoint(\"echo\", \"Hello World\")\n .addLayer(ImmutableList.of(fileA), AbsoluteUnixPath.get(\"/app\"))\n // layer with out-of-order files\n .addLayer(ImmutableList.of(fileC, fileB), \"/app\")\n .addFileEntriesLayer(\n FileEntriesLayer.builder()\n .addEntryRecursive(subdir, AbsoluteUnixPath.get(\"/app\"))\n .build())\n .containerize(containerizer);\n }\n\n", "meta": {"hash_id": "46706c9cde149ce8dea9155cb50c022a3354087c38ea72250ee08db1e0a21b58"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 5, "content": " @Test\n public void testTarballStructure() throws IOException {\n // known content should produce known results\n List actual = new ArrayList<>();\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n actual.add(imageEntry.getName());\n }\n }\n\n assertThat(actual)\n .containsExactly(\n \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n \"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n \"config.json\",\n \"manifest.json\")\n .inOrder();\n }\n\n", "meta": {"hash_id": "8b029daabb3dd4c353c54e4a4f05f43865e1775199aa6d2faece7d3e7a165b71"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 6, "content": " @Test\n public void testManifest() throws IOException {\n String expectedManifest =\n \"[{\\\"Config\\\":\\\"config.json\\\",\\\"RepoTags\\\":[\\\"jib-core/reproducible:latest\\\"],\"\n + \"\\\"Layers\\\":[\\\"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\\\",\\\"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\\\",\\\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\\\"]}]\";\n String generatedManifest = extractFromTarFileAsString(imageTar, \"manifest.json\");\n assertThat(generatedManifest).isEqualTo(expectedManifest);\n }\n\n", "meta": {"hash_id": "91b6f133d3e10b844f7b33a1129a7341065e447898b088968569c666fb0bf9aa"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 7, "content": " @Test\n public void testConfiguration() throws IOException {\n String expectedConfig =\n \"{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\"\n + \"\\\"config\\\":{\\\"Env\\\":[],\\\"Entrypoint\\\":[\\\"echo\\\",\\\"Hello World\\\"],\\\"ExposedPorts\\\":{},\\\"Labels\\\":{},\\\"Volumes\\\":{}},\"\n + 
\"\\\"history\\\":[{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"}],\"\n", "meta": {"hash_id": "fa188fe7c1ff1a0cd34d5fe366f51756f137c9d50deaf5de97472c4813c242d5"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 8, "content": " + \"\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:18e4f44e6d1835bd968339b166057bd17ab7d4cbb56dc7262a5cafea7cf8d405\\\",\\\"sha256:13369c34f073f2b9c1fa6431e23d925f1a8eac65b1726c8cc8fcc2596c69b414\\\",\\\"sha256:4f92c507112d7880ca0f504ef8272b7fdee107263270125036a260a741565923\\\"]}}\";\n String generatedConfig = extractFromTarFileAsString(imageTar, \"config.json\");\n assertThat(generatedConfig).isEqualTo(expectedConfig);\n }\n\n @Test\n public void testImageLayout() throws IOException {\n Set paths = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n paths.add(layerEntry.getName());\n }\n });\n assertThat(paths)\n .containsExactly(\n \"app/fileA.txt\",\n \"app/fileB.txt\",\n \"app/fileC.txt\",\n \"app/fileD.txt\",\n \"app/subdir/fileE.txt\");\n }\n\n", "meta": {"hash_id": "1bae4e592af7a63228425e47c4e3e56667083b95551cc00c69074d33bb82e9b5"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 9, "content": " @Test\n public void testAllFileAndDirectories() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) ->\n assertThat(layerEntry.isFile() || layerEntry.isDirectory()).isTrue());\n }\n\n @Test\n public void testTimestampsEpochPlus1s() throws IOException {\n layerEntriesDo(\n (layerName, layerEntry) -> {\n Instant modificationTime = layerEntry.getLastModifiedDate().toInstant();\n assertThat(modificationTime).isEqualTo(Instant.ofEpochSecond(1));\n });\n }\n\n", "meta": {"hash_id": "3d86382d60c875f500e6179c141de84887097b87fc0234bce77dcaa6bf25eec1"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 10, "content": " @Test\n public void testPermissions() throws IOException {\n assertThat(FilePermissions.DEFAULT_FILE_PERMISSIONS.getPermissionBits()).isEqualTo(0644);\n assertThat(FilePermissions.DEFAULT_FOLDER_PERMISSIONS.getPermissionBits()).isEqualTo(0755);\n layerEntriesDo(\n (layerName, layerEntry) -> {\n if (layerEntry.isFile()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0644);\n } else if (layerEntry.isDirectory()) {\n assertThat(layerEntry.getMode() & 0777).isEqualTo(0755);\n }\n });\n }\n\n", "meta": {"hash_id": "e44f42996501818737c06a785d168301b1cc9d4ceb62461c5da49aef91cb43ca"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 11, "content": " @Test\n public void testNoImplicitParentDirectories() throws IOException {\n Set directories = new HashSet<>();\n layerEntriesDo(\n (layerName, layerEntry) -> {\n String entryPath = layerEntry.getName();\n if (layerEntry.isDirectory()) {\n assertThat(entryPath.endsWith(\"/\")).isTrue();\n entryPath = entryPath.substring(0, entryPath.length() - 1);\n }\n\n int lastSlashPosition = entryPath.lastIndexOf('/');\n String parent = entryPath.substring(0, Math.max(0, lastSlashPosition));\n if (!parent.isEmpty()) 
{\n assertThat(directories.contains(parent)).isTrue();\n }\n if (layerEntry.isDirectory()) {\n directories.add(entryPath);\n }\n });\n }\n\n", "meta": {"hash_id": "5c7f2c55312af527e75eb585d5e8367694742546b1d325f8c4c6e85f7e20a578"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 12, "content": " @Test\n public void testFileOrdering() throws IOException {\n Multimap layerPaths = ArrayListMultimap.create();\n layerEntriesDo((layerName, layerEntry) -> layerPaths.put(layerName, layerEntry.getName()));\n for (Collection paths : layerPaths.asMap().values()) {\n List sorted = new ArrayList<>(paths);\n // ReproducibleLayerBuilder sorts by TarArchiveEntry::getName()\n Collections.sort(sorted);\n assertThat(paths).containsExactlyElementsIn(sorted).inOrder();\n }\n }\n\n private void layerEntriesDo(BiConsumer layerConsumer)\n throws IOException {\n\n", "meta": {"hash_id": "0c814790c1fa239d965e7be9c7ee0d0ecb97e221141c91887ed4e98b0b4f843e"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 13, "content": " try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n String imageEntryName = imageEntry.getName();\n // assume all .tar.gz files are layers\n if (imageEntry.isFile() && imageEntryName.endsWith(\".tar.gz\")) {\n @SuppressWarnings(\"resource\") // must not close sub-streams\n TarArchiveInputStream layer = new TarArchiveInputStream(new GZIPInputStream(input));\n TarArchiveEntry layerEntry;\n while ((layerEntry = layer.getNextTarEntry()) != null) {\n layerConsumer.accept(imageEntryName, layerEntry);\n }\n }\n }\n }\n }\n\n", "meta": {"hash_id": "ae852a40ebdd8ad26fa422ae8173a6068e80014007a8018277eed5b8e289327c"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 14, "content": " private static String extractFromTarFileAsString(File tarFile, String filename)\n throws IOException {\n try (TarArchiveInputStream input =\n new TarArchiveInputStream(Files.newInputStream(tarFile.toPath()))) {\n TarArchiveEntry imageEntry;\n while ((imageEntry = input.getNextTarEntry()) != null) {\n if (filename.equals(imageEntry.getName())) {\n return CharStreams.toString(new InputStreamReader(input, StandardCharsets.UTF_8));\n }\n }\n }\n throw new AssertionError(\"file not found: \" + filename);\n }\n}\n", "meta": {"hash_id": "99912deb7335238dedadcdf3cbc33f14ad2c49beedfe049bae9316f8c017dd6e"}}]}], "golden_chunks": [{"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 3, "content": "/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n @ClassRule public static final TemporaryFolder imageLocation = new TemporaryFolder();\n\n private static File imageTar;\n\n @BeforeClass\n public static void createImage()\n throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n IOException, RegistryException, ExecutionException {\n\n Path root = imageLocation.getRoot().toPath();\n Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n Path fileB = Files.createFile(root.resolve(\"fileB.txt\"));\n Path fileC = Files.createFile(root.resolve(\"fileC.txt\"));\n Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n Path subsubdir = 
Files.createDirectory(subdir.resolve(\"subdir\"));\n Files.createFile(subdir.resolve(\"fileD.txt\"));\n Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n", "meta": {"hash_id": "995ea5efd087b5ba76509c7f959484d0c63cda3e86bb036f87edcbbb63d7462d"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/integration-test/java/com/google/cloud/tools/jib/api/ReproducibleImageTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 15, "source": "codebases/jsonl"}} +{"query": "What is the expected tarball structure and how is it verified in `testTarballStructure()`?", "answer": "The `testTarballStructure()` method verifies the expected tarball structure by reading the generated `image.tar` file and asserting that it contains the following entries in order:\n\nassertThat(actual)\n .containsExactly(\n \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n \"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n \"config.json\",\n \"manifest.json\")\n .inOrder();\n", "golden_doc_uuids": ["bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee"], "golden_chunk_uuids": [["bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", 5]], "golden_documents": [{"uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "content": "/*\n * Copyright 2019 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.api;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.api.buildplan.AbsoluteUnixPath;\nimport com.google.cloud.tools.jib.api.buildplan.FileEntriesLayer;\nimport com.google.cloud.tools.jib.api.buildplan.FilePermissions;\nimport com.google.common.collect.ArrayListMultimap;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.Multimap;\nimport com.google.common.io.CharStreams;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.time.Instant;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport java.util.function.BiConsumer;\nimport java.util.zip.GZIPInputStream;\nimport org.apache.commons.compress.archivers.tar.TarArchiveEntry;\nimport org.apache.commons.compress.archivers.tar.TarArchiveInputStream;\nimport org.junit.BeforeClass;\nimport org.junit.ClassRule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\n/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n  @ClassRule public static final TemporaryFolder imageLocation = new TemporaryFolder();\n\n  private static File imageTar;\n\n  @BeforeClass\n  public static void createImage()\n      throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n          IOException, RegistryException, ExecutionException {\n\n    Path root = imageLocation.getRoot().toPath();\n    Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n    Path fileB = Files.createFile(root.resolve(\"fileB.txt\"));\n    Path fileC = Files.createFile(root.resolve(\"fileC.txt\"));\n    Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n    Path subsubdir = Files.createDirectory(subdir.resolve(\"subdir\"));\n    Files.createFile(subdir.resolve(\"fileD.txt\"));\n    Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n    imageTar = new File(imageLocation.getRoot(), \"image.tar\");\n    Containerizer containerizer =\n        Containerizer.to(TarImage.at(imageTar.toPath()).named(\"jib-core/reproducible\"));\n\n    Jib.fromScratch()\n        .setEntrypoint(\"echo\", \"Hello World\")\n        .addLayer(ImmutableList.of(fileA), AbsoluteUnixPath.get(\"/app\"))\n        // layer with out-of-order files\n        .addLayer(ImmutableList.of(fileC, fileB), \"/app\")\n        .addFileEntriesLayer(\n            FileEntriesLayer.builder()\n                .addEntryRecursive(subdir, AbsoluteUnixPath.get(\"/app\"))\n                .build())\n        .containerize(containerizer);\n  }\n\n  @Test\n  public void testTarballStructure() throws IOException {\n    // known content should produce known results\n    List<String> actual = new ArrayList<>();\n    try (TarArchiveInputStream input =\n        new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n      TarArchiveEntry imageEntry;\n      while ((imageEntry = input.getNextTarEntry()) != null) {\n        actual.add(imageEntry.getName());\n      }\n    }\n\n    assertThat(actual)\n        .containsExactly(\n            \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n            \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n            
\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n            \"config.json\",\n            \"manifest.json\")\n        .inOrder();\n  }\n\n  @Test\n  public void testManifest() throws IOException {\n    String expectedManifest =\n        \"[{\\\"Config\\\":\\\"config.json\\\",\\\"RepoTags\\\":[\\\"jib-core/reproducible:latest\\\"],\"\n            + \"\\\"Layers\\\":[\\\"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\\\",\\\"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\\\",\\\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\\\"]}]\";\n    String generatedManifest = extractFromTarFileAsString(imageTar, \"manifest.json\");\n    assertThat(generatedManifest).isEqualTo(expectedManifest);\n  }\n\n  @Test\n  public void testConfiguration() throws IOException {\n    String expectedConfig =\n        \"{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\"\n            + \"\\\"config\\\":{\\\"Env\\\":[],\\\"Entrypoint\\\":[\\\"echo\\\",\\\"Hello World\\\"],\\\"ExposedPorts\\\":{},\\\"Labels\\\":{},\\\"Volumes\\\":{}},\"\n            + \"\\\"history\\\":[{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"}],\"\n            + \"\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:18e4f44e6d1835bd968339b166057bd17ab7d4cbb56dc7262a5cafea7cf8d405\\\",\\\"sha256:13369c34f073f2b9c1fa6431e23d925f1a8eac65b1726c8cc8fcc2596c69b414\\\",\\\"sha256:4f92c507112d7880ca0f504ef8272b7fdee107263270125036a260a741565923\\\"]}}\";\n    String generatedConfig = extractFromTarFileAsString(imageTar, \"config.json\");\n    assertThat(generatedConfig).isEqualTo(expectedConfig);\n  }\n\n  @Test\n  public void testImageLayout() throws IOException {\n    Set<String> paths = new HashSet<>();\n    layerEntriesDo(\n        (layerName, layerEntry) -> {\n          if (layerEntry.isFile()) {\n            paths.add(layerEntry.getName());\n          }\n        });\n    assertThat(paths)\n        .containsExactly(\n            \"app/fileA.txt\",\n            \"app/fileB.txt\",\n            \"app/fileC.txt\",\n            \"app/fileD.txt\",\n            \"app/subdir/fileE.txt\");\n  }\n\n  @Test\n  public void testAllFileAndDirectories() throws IOException {\n    layerEntriesDo(\n        (layerName, layerEntry) ->\n            assertThat(layerEntry.isFile() || layerEntry.isDirectory()).isTrue());\n  }\n\n  @Test\n  public void testTimestampsEpochPlus1s() throws IOException {\n    layerEntriesDo(\n        (layerName, layerEntry) -> {\n          Instant modificationTime = layerEntry.getLastModifiedDate().toInstant();\n          assertThat(modificationTime).isEqualTo(Instant.ofEpochSecond(1));\n        });\n  }\n\n  @Test\n  public void testPermissions() throws IOException {\n    assertThat(FilePermissions.DEFAULT_FILE_PERMISSIONS.getPermissionBits()).isEqualTo(0644);\n    assertThat(FilePermissions.DEFAULT_FOLDER_PERMISSIONS.getPermissionBits()).isEqualTo(0755);\n    layerEntriesDo(\n        (layerName, layerEntry) -> {\n          if (layerEntry.isFile()) {\n            assertThat(layerEntry.getMode() & 0777).isEqualTo(0644);\n          } else if (layerEntry.isDirectory()) {\n            assertThat(layerEntry.getMode() & 0777).isEqualTo(0755);\n          }\n        });\n  }\n\n  @Test\n  public void testNoImplicitParentDirectories() throws IOException {\n    Set<String> directories = new HashSet<>();\n    layerEntriesDo(\n        (layerName, layerEntry) -> {\n          String entryPath = 
layerEntry.getName();\n          if (layerEntry.isDirectory()) {\n            assertThat(entryPath.endsWith(\"/\")).isTrue();\n            entryPath = entryPath.substring(0, entryPath.length() - 1);\n          }\n\n          int lastSlashPosition = entryPath.lastIndexOf('/');\n          String parent = entryPath.substring(0, Math.max(0, lastSlashPosition));\n          if (!parent.isEmpty()) {\n            assertThat(directories.contains(parent)).isTrue();\n          }\n          if (layerEntry.isDirectory()) {\n            directories.add(entryPath);\n          }\n        });\n  }\n\n  @Test\n  public void testFileOrdering() throws IOException {\n    Multimap<String, String> layerPaths = ArrayListMultimap.create();\n    layerEntriesDo((layerName, layerEntry) -> layerPaths.put(layerName, layerEntry.getName()));\n    for (Collection<String> paths : layerPaths.asMap().values()) {\n      List<String> sorted = new ArrayList<>(paths);\n      // ReproducibleLayerBuilder sorts by TarArchiveEntry::getName()\n      Collections.sort(sorted);\n      assertThat(paths).containsExactlyElementsIn(sorted).inOrder();\n    }\n  }\n\n  private void layerEntriesDo(BiConsumer<String, TarArchiveEntry> layerConsumer)\n      throws IOException {\n\n    try (TarArchiveInputStream input =\n        new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n      TarArchiveEntry imageEntry;\n      while ((imageEntry = input.getNextTarEntry()) != null) {\n        String imageEntryName = imageEntry.getName();\n        // assume all .tar.gz files are layers\n        if (imageEntry.isFile() && imageEntryName.endsWith(\".tar.gz\")) {\n          @SuppressWarnings(\"resource\") // must not close sub-streams\n          TarArchiveInputStream layer = new TarArchiveInputStream(new GZIPInputStream(input));\n          TarArchiveEntry layerEntry;\n          while ((layerEntry = layer.getNextTarEntry()) != null) {\n            layerConsumer.accept(imageEntryName, layerEntry);\n          }\n        }\n      }\n    }\n  }\n\n  private static String extractFromTarFileAsString(File tarFile, String filename)\n      throws IOException {\n    try (TarArchiveInputStream input =\n        new TarArchiveInputStream(Files.newInputStream(tarFile.toPath()))) {\n      TarArchiveEntry imageEntry;\n      while ((imageEntry = input.getNextTarEntry()) != null) {\n        if (filename.equals(imageEntry.getName())) {\n          return CharStreams.toString(new InputStreamReader(input, StandardCharsets.UTF_8));\n        }\n      }\n    }\n    throw new AssertionError(\"file not found: \" + filename);\n  }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/integration-test/java/com/google/cloud/tools/jib/api/ReproducibleImageTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 15, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 0, "content": "/*\n * Copyright 2019 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "2d1e13dcf480a643752d3b3954afdfd2ad4151edbf6e4129b792a5e0c58724e8"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 1, "content": "package com.google.cloud.tools.jib.api;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.api.buildplan.AbsoluteUnixPath;\nimport com.google.cloud.tools.jib.api.buildplan.FileEntriesLayer;\nimport com.google.cloud.tools.jib.api.buildplan.FilePermissions;\nimport com.google.common.collect.ArrayListMultimap;\nimport com.google.common.collect.ImmutableList;\nimport com.google.common.collect.Multimap;\nimport com.google.common.io.CharStreams;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\n", "meta": {"hash_id": "7441c460196213a9cfc800d7fa2e3a142047244dc769bf952a72062610d38e40"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 2, "content": "import java.nio.file.Path;\nimport java.time.Instant;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport java.util.function.BiConsumer;\nimport java.util.zip.GZIPInputStream;\nimport org.apache.commons.compress.archivers.tar.TarArchiveEntry;\nimport org.apache.commons.compress.archivers.tar.TarArchiveInputStream;\nimport org.junit.BeforeClass;\nimport org.junit.ClassRule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\n", "meta": {"hash_id": "ceab2a104829864afd7c7713404be4b29a3be8e3868d00c689d1daf34b8b140a"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 3, "content": "/**\n * Verify that created image has explicit directory structures, default timestamps, permissions, and\n * file orderings.\n */\npublic class ReproducibleImageTest {\n\n @ClassRule public static final TemporaryFolder imageLocation = new TemporaryFolder();\n\n private static File imageTar;\n\n @BeforeClass\n public static void createImage()\n throws InvalidImageReferenceException, InterruptedException, CacheDirectoryCreationException,\n IOException, RegistryException, ExecutionException {\n\n Path root = imageLocation.getRoot().toPath();\n Path fileA = Files.createFile(root.resolve(\"fileA.txt\"));\n Path fileB = Files.createFile(root.resolve(\"fileB.txt\"));\n Path fileC = Files.createFile(root.resolve(\"fileC.txt\"));\n Path subdir = Files.createDirectory(root.resolve(\"dir\"));\n Path subsubdir = Files.createDirectory(subdir.resolve(\"subdir\"));\n Files.createFile(subdir.resolve(\"fileD.txt\"));\n Files.createFile(subsubdir.resolve(\"fileE.txt\"));\n\n", "meta": {"hash_id": "995ea5efd087b5ba76509c7f959484d0c63cda3e86bb036f87edcbbb63d7462d"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 4, "content": " imageTar = new File(imageLocation.getRoot(), \"image.tar\");\n Containerizer containerizer =\n Containerizer.to(TarImage.at(imageTar.toPath()).named(\"jib-core/reproducible\"));\n\n Jib.fromScratch()\n .setEntrypoint(\"echo\", \"Hello World\")\n .addLayer(ImmutableList.of(fileA), AbsoluteUnixPath.get(\"/app\"))\n // layer with out-of-order files\n .addLayer(ImmutableList.of(fileC, fileB), \"/app\")\n 
.addFileEntriesLayer(\n            FileEntriesLayer.builder()\n                .addEntryRecursive(subdir, AbsoluteUnixPath.get(\"/app\"))\n                .build())\n        .containerize(containerizer);\n  }\n\n", "meta": {"hash_id": "46706c9cde149ce8dea9155cb50c022a3354087c38ea72250ee08db1e0a21b58"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 5, "content": "  @Test\n  public void testTarballStructure() throws IOException {\n    // known content should produce known results\n    List<String> actual = new ArrayList<>();\n    try (TarArchiveInputStream input =\n        new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n      TarArchiveEntry imageEntry;\n      while ((imageEntry = input.getNextTarEntry()) != null) {\n        actual.add(imageEntry.getName());\n      }\n    }\n\n    assertThat(actual)\n        .containsExactly(\n            \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n            \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n            \"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n            \"config.json\",\n            \"manifest.json\")\n        .inOrder();\n  }\n\n", "meta": {"hash_id": "8b029daabb3dd4c353c54e4a4f05f43865e1775199aa6d2faece7d3e7a165b71"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 6, "content": "  @Test\n  public void testManifest() throws IOException {\n    String expectedManifest =\n        \"[{\\\"Config\\\":\\\"config.json\\\",\\\"RepoTags\\\":[\\\"jib-core/reproducible:latest\\\"],\"\n            + \"\\\"Layers\\\":[\\\"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\\\",\\\"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\\\",\\\"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\\\"]}]\";\n    String generatedManifest = extractFromTarFileAsString(imageTar, \"manifest.json\");\n    assertThat(generatedManifest).isEqualTo(expectedManifest);\n  }\n\n", "meta": {"hash_id": "91b6f133d3e10b844f7b33a1129a7341065e447898b088968569c666fb0bf9aa"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 7, "content": "  @Test\n  public void testConfiguration() throws IOException {\n    String expectedConfig =\n        \"{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"architecture\\\":\\\"amd64\\\",\\\"os\\\":\\\"linux\\\",\"\n            + \"\\\"config\\\":{\\\"Env\\\":[],\\\"Entrypoint\\\":[\\\"echo\\\",\\\"Hello World\\\"],\\\"ExposedPorts\\\":{},\\\"Labels\\\":{},\\\"Volumes\\\":{}},\"\n            + \"\\\"history\\\":[{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"},{\\\"created\\\":\\\"1970-01-01T00:00:00Z\\\",\\\"author\\\":\\\"Jib\\\",\\\"created_by\\\":\\\"jib-core:null\\\",\\\"comment\\\":\\\"\\\"}],\"\n", "meta": {"hash_id": "fa188fe7c1ff1a0cd34d5fe366f51756f137c9d50deaf5de97472c4813c242d5"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 8, "content": "            + \"\\\"rootfs\\\":{\\\"type\\\":\\\"layers\\\",\\\"diff_ids\\\":[\\\"sha256:18e4f44e6d1835bd968339b166057bd17ab7d4cbb56dc7262a5cafea7cf8d405\\\",\\\"sha256:13369c34f073f2b9c1fa6431e23d925f1a8eac65b1726c8cc8fcc2596c69b414\\\",\\\"sha256:4f92c507112d7880ca0f504ef8272b7fdee107263270125036a260a741565923\\\"]}}\";\n    String generatedConfig = extractFromTarFileAsString(imageTar, \"config.json\");\n    assertThat(generatedConfig).isEqualTo(expectedConfig);\n  
}\n\n  @Test\n  public void testImageLayout() throws IOException {\n    Set<String> paths = new HashSet<>();\n    layerEntriesDo(\n        (layerName, layerEntry) -> {\n          if (layerEntry.isFile()) {\n            paths.add(layerEntry.getName());\n          }\n        });\n    assertThat(paths)\n        .containsExactly(\n            \"app/fileA.txt\",\n            \"app/fileB.txt\",\n            \"app/fileC.txt\",\n            \"app/fileD.txt\",\n            \"app/subdir/fileE.txt\");\n  }\n\n", "meta": {"hash_id": "1bae4e592af7a63228425e47c4e3e56667083b95551cc00c69074d33bb82e9b5"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 9, "content": "  @Test\n  public void testAllFileAndDirectories() throws IOException {\n    layerEntriesDo(\n        (layerName, layerEntry) ->\n            assertThat(layerEntry.isFile() || layerEntry.isDirectory()).isTrue());\n  }\n\n  @Test\n  public void testTimestampsEpochPlus1s() throws IOException {\n    layerEntriesDo(\n        (layerName, layerEntry) -> {\n          Instant modificationTime = layerEntry.getLastModifiedDate().toInstant();\n          assertThat(modificationTime).isEqualTo(Instant.ofEpochSecond(1));\n        });\n  }\n\n", "meta": {"hash_id": "3d86382d60c875f500e6179c141de84887097b87fc0234bce77dcaa6bf25eec1"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 10, "content": "  @Test\n  public void testPermissions() throws IOException {\n    assertThat(FilePermissions.DEFAULT_FILE_PERMISSIONS.getPermissionBits()).isEqualTo(0644);\n    assertThat(FilePermissions.DEFAULT_FOLDER_PERMISSIONS.getPermissionBits()).isEqualTo(0755);\n    layerEntriesDo(\n        (layerName, layerEntry) -> {\n          if (layerEntry.isFile()) {\n            assertThat(layerEntry.getMode() & 0777).isEqualTo(0644);\n          } else if (layerEntry.isDirectory()) {\n            assertThat(layerEntry.getMode() & 0777).isEqualTo(0755);\n          }\n        });\n  }\n\n", "meta": {"hash_id": "e44f42996501818737c06a785d168301b1cc9d4ceb62461c5da49aef91cb43ca"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 11, "content": "  @Test\n  public void testNoImplicitParentDirectories() throws IOException {\n    Set<String> directories = new HashSet<>();\n    layerEntriesDo(\n        (layerName, layerEntry) -> {\n          String entryPath = layerEntry.getName();\n          if (layerEntry.isDirectory()) {\n            assertThat(entryPath.endsWith(\"/\")).isTrue();\n            entryPath = entryPath.substring(0, entryPath.length() - 1);\n          }\n\n          int lastSlashPosition = entryPath.lastIndexOf('/');\n          String parent = entryPath.substring(0, Math.max(0, lastSlashPosition));\n          if (!parent.isEmpty()) {\n            assertThat(directories.contains(parent)).isTrue();\n          }\n          if (layerEntry.isDirectory()) {\n            directories.add(entryPath);\n          }\n        });\n  }\n\n", "meta": {"hash_id": "5c7f2c55312af527e75eb585d5e8367694742546b1d325f8c4c6e85f7e20a578"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 12, "content": "  @Test\n  public void testFileOrdering() throws IOException {\n    Multimap<String, String> layerPaths = ArrayListMultimap.create();\n    layerEntriesDo((layerName, layerEntry) -> layerPaths.put(layerName, layerEntry.getName()));\n    for (Collection<String> paths : layerPaths.asMap().values()) {\n      List<String> sorted = new ArrayList<>(paths);\n      // ReproducibleLayerBuilder sorts by TarArchiveEntry::getName()\n      Collections.sort(sorted);\n      assertThat(paths).containsExactlyElementsIn(sorted).inOrder();\n    }\n  }\n\n  private void layerEntriesDo(BiConsumer<String, TarArchiveEntry> layerConsumer)\n      throws IOException {\n\n", "meta": {"hash_id": "0c814790c1fa239d965e7be9c7ee0d0ecb97e221141c91887ed4e98b0b4f843e"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 13, "content": "    try 
(TarArchiveInputStream input =\n        new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n      TarArchiveEntry imageEntry;\n      while ((imageEntry = input.getNextTarEntry()) != null) {\n        String imageEntryName = imageEntry.getName();\n        // assume all .tar.gz files are layers\n        if (imageEntry.isFile() && imageEntryName.endsWith(\".tar.gz\")) {\n          @SuppressWarnings(\"resource\") // must not close sub-streams\n          TarArchiveInputStream layer = new TarArchiveInputStream(new GZIPInputStream(input));\n          TarArchiveEntry layerEntry;\n          while ((layerEntry = layer.getNextTarEntry()) != null) {\n            layerConsumer.accept(imageEntryName, layerEntry);\n          }\n        }\n      }\n    }\n  }\n\n", "meta": {"hash_id": "ae852a40ebdd8ad26fa422ae8173a6068e80014007a8018277eed5b8e289327c"}}, {"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 14, "content": "  private static String extractFromTarFileAsString(File tarFile, String filename)\n      throws IOException {\n    try (TarArchiveInputStream input =\n        new TarArchiveInputStream(Files.newInputStream(tarFile.toPath()))) {\n      TarArchiveEntry imageEntry;\n      while ((imageEntry = input.getNextTarEntry()) != null) {\n        if (filename.equals(imageEntry.getName())) {\n          return CharStreams.toString(new InputStreamReader(input, StandardCharsets.UTF_8));\n        }\n      }\n    }\n    throw new AssertionError(\"file not found: \" + filename);\n  }\n}\n", "meta": {"hash_id": "99912deb7335238dedadcdf3cbc33f14ad2c49beedfe049bae9316f8c017dd6e"}}]}], "golden_chunks": [{"doc_uuid": "bd642f9c2a6fa3b4643bf66c82f214dd6cea1dcff7f20ce8cad864503b7f40ee", "index": 5, "content": "  @Test\n  public void testTarballStructure() throws IOException {\n    // known content should produce known results\n    List<String> actual = new ArrayList<>();\n    try (TarArchiveInputStream input =\n        new TarArchiveInputStream(Files.newInputStream(imageTar.toPath()))) {\n      TarArchiveEntry imageEntry;\n      while ((imageEntry = input.getNextTarEntry()) != null) {\n        actual.add(imageEntry.getName());\n      }\n    }\n\n    assertThat(actual)\n        .containsExactly(\n            \"c46572ef74f58d95e44dd36c1fbdfebd3752e8b56a794a13c11cfed35a1a6e1c.tar.gz\",\n            \"6d2763b0f3940d324ea6b55386429e5b173899608abf7d1bff62e25dd2e4dcea.tar.gz\",\n            \"530c1954a2b087d0b989895ea56435c9dc739a973f2d2b6cb9bb98e55bbea7ac.tar.gz\",\n            \"config.json\",\n            \"manifest.json\")\n        .inOrder();\n  }\n\n", "meta": {"hash_id": "8b029daabb3dd4c353c54e4a4f05f43865e1775199aa6d2faece7d3e7a165b71"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/integration-test/java/com/google/cloud/tools/jib/api/ReproducibleImageTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 15, "source": "codebases/jsonl"}} +{"query": "What package does the HelloWorld class belong to?", "answer": "The HelloWorld class belongs to the com.test package, as specified by this line at the top of the file:\n\npackage com.test;\n", "golden_doc_uuids": ["f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0"], "golden_chunk_uuids": [["f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", 0]], "golden_documents": [{"uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. 
You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/resources/maven/projects/multi/complex-service/src/main/java/com/test/HelloWorld.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"hash_id": "943817101e639be7463b5f4b3ddad8f1f27fa50d14f43995d7f8083786a876c0"}}]}], "golden_chunks": [{"doc_uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"hash_id": "943817101e639be7463b5f4b3ddad8f1f27fa50d14f43995d7f8083786a876c0"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/resources/maven/projects/multi/complex-service/src/main/java/com/test/HelloWorld.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What license is this code released under?", "answer": "The code is licensed under the Apache License, Version 2.0, as stated in the code comments at the top of the file:\n\n/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. 
You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n", "golden_doc_uuids": ["f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0"], "golden_chunk_uuids": [["f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", 0]], "golden_documents": [{"uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/resources/maven/projects/multi/complex-service/src/main/java/com/test/HelloWorld.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"hash_id": "943817101e639be7463b5f4b3ddad8f1f27fa50d14f43995d7f8083786a876c0"}}]}], "golden_chunks": [{"doc_uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"hash_id": "943817101e639be7463b5f4b3ddad8f1f27fa50d14f43995d7f8083786a876c0"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/resources/maven/projects/multi/complex-service/src/main/java/com/test/HelloWorld.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What year was this code copyrighted?", "answer": "According to the copyright notice at the top of the file, this code was copyrighted in 2018:\n\n/*\n * Copyright 2018 Google LLC. \n */\n", "golden_doc_uuids": ["f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0"], "golden_chunk_uuids": [["f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", 0]], "golden_documents": [{"uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/resources/maven/projects/multi/complex-service/src/main/java/com/test/HelloWorld.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"hash_id": "943817101e639be7463b5f4b3ddad8f1f27fa50d14f43995d7f8083786a876c0"}}]}], "golden_chunks": [{"doc_uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. 
You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"hash_id": "943817101e639be7463b5f4b3ddad8f1f27fa50d14f43995d7f8083786a876c0"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/resources/maven/projects/multi/complex-service/src/main/java/com/test/HelloWorld.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What company owns the copyright to this code?", "answer": "The copyright notice states that Google LLC owns the copyright to this code:\n\n/*\n * Copyright 2018 Google LLC.\n */\n", "golden_doc_uuids": ["f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0"], "golden_chunk_uuids": [["f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", 0]], "golden_documents": [{"uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/resources/maven/projects/multi/complex-service/src/main/java/com/test/HelloWorld.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"hash_id": "943817101e639be7463b5f4b3ddad8f1f27fa50d14f43995d7f8083786a876c0"}}]}], "golden_chunks": [{"doc_uuid": "f59e2b84517250cd221b201aeac31b98b6174d155ea11255097ffb06702d29b0", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.test;\n\npublic class HelloWorld {\n public static void main(String[] args) {\n System.out.println(\"Hello world\");\n }\n}\n", "meta": {"hash_id": "943817101e639be7463b5f4b3ddad8f1f27fa50d14f43995d7f8083786a876c0"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/resources/maven/projects/multi/complex-service/src/main/java/com/test/HelloWorld.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "How does MavenSettingsServerCredentials infer credentials for a server?", "answer": "The inferAuth method of MavenSettingsServerCredentials takes a server name as input and tries to find corresponding credentials in the Maven settings. It returns an Optional. Based on the tests, it looks for encrypted credentials, unencrypted credentials, and can handle server names with and without port numbers. If no matching server is found, it returns an empty Optional, as seen in this test:\n\n@Test\npublic void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n}\n", "golden_doc_uuids": ["26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08"], "golden_chunk_uuids": [["26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", 2], ["26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", 1], ["26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", 0]], "golden_documents": [{"uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. */\npublic class MavenSettingsServerCredentialsTest {\n\n private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n 
Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. Should work as well.\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/java/com/google/cloud/tools/jib/maven/MavenSettingsServerCredentialsTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "334768cc2980845386a3f3d2ebd5e9e0d5a70dede540695dbbf5a3ddda0a8599"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 1, "content": "package com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. 
*/\npublic class MavenSettingsServerCredentialsTest {\n\n", "meta": {"hash_id": "8b15dcfd6a515857cde2f2ad7402690e6fb7643778122af48d8c5454e3ca4ee8"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 2, "content": " private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n", "meta": {"hash_id": "dd8bf1b5e1afacb77c9eeda5177043a7df9b27bca265736a78a8c8c149093344"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 3, "content": " @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n", "meta": {"hash_id": "3c3dc92029b13cd58097b80e589cb38af1b49483c985959ea2e3fca1dd55b0ac"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 4, "content": " @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n", "meta": {"hash_id": "ae601246fc7dfa62aba97afe01a03acada57908ff4c30f1d230926717663395c"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 5, "content": " @Test\n public void testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n", 
"meta": {"hash_id": "ef8ef9af68497ec93a3233df0c02380c89c4f4bd9b3e4d2978e16896a54c9330"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 6, "content": " @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n", "meta": {"hash_id": "8a878c3d7fce5df1b85c96238ee874812f8cb27a11065494120a34036a0917dc"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 7, "content": " @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. Should work as well.\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n", "meta": {"hash_id": "486ad1839398a0e8bd6d2b514b6c960f0b8ae79075b291a7ba469512f1c6a390"}}]}], "golden_chunks": [{"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 2, "content": " private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n", "meta": {"hash_id": "dd8bf1b5e1afacb77c9eeda5177043a7df9b27bca265736a78a8c8c149093344"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 1, "content": "package com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. */\npublic class MavenSettingsServerCredentialsTest {\n\n", "meta": {"hash_id": "8b15dcfd6a515857cde2f2ad7402690e6fb7643778122af48d8c5454e3ca4ee8"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "334768cc2980845386a3f3d2ebd5e9e0d5a70dede540695dbbf5a3ddda0a8599"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/java/com/google/cloud/tools/jib/maven/MavenSettingsServerCredentialsTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}} +{"query": "What exceptions can be thrown when inferring credentials with MavenSettingsServerCredentials?", "answer": "The inferAuth method of MavenSettingsServerCredentials can throw an InferredAuthException if there is an issue decrypting credentials from the settings file. This is demonstrated in the following test:\n\n@Test\npublic void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n}\n", "golden_doc_uuids": ["26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08"], "golden_chunk_uuids": [["26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", 1]], "golden_documents": [{"uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. 
*/\npublic class MavenSettingsServerCredentialsTest {\n\n private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. 
Should work as well.\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/java/com/google/cloud/tools/jib/maven/MavenSettingsServerCredentialsTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "334768cc2980845386a3f3d2ebd5e9e0d5a70dede540695dbbf5a3ddda0a8599"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 1, "content": "package com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. 
*/\npublic class MavenSettingsServerCredentialsTest {\n\n", "meta": {"hash_id": "8b15dcfd6a515857cde2f2ad7402690e6fb7643778122af48d8c5454e3ca4ee8"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 2, "content": " private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n", "meta": {"hash_id": "dd8bf1b5e1afacb77c9eeda5177043a7df9b27bca265736a78a8c8c149093344"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 3, "content": " @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n", "meta": {"hash_id": "3c3dc92029b13cd58097b80e589cb38af1b49483c985959ea2e3fca1dd55b0ac"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 4, "content": " @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n", "meta": {"hash_id": "ae601246fc7dfa62aba97afe01a03acada57908ff4c30f1d230926717663395c"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 5, "content": " @Test\n public void testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n", 
"meta": {"hash_id": "ef8ef9af68497ec93a3233df0c02380c89c4f4bd9b3e4d2978e16896a54c9330"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 6, "content": " @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n", "meta": {"hash_id": "8a878c3d7fce5df1b85c96238ee874812f8cb27a11065494120a34036a0917dc"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 7, "content": " @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. Should work as well.\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n", "meta": {"hash_id": "486ad1839398a0e8bd6d2b514b6c960f0b8ae79075b291a7ba469512f1c6a390"}}]}], "golden_chunks": [{"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 1, "content": "package com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. */\npublic class MavenSettingsServerCredentialsTest {\n\n", "meta": {"hash_id": "8b15dcfd6a515857cde2f2ad7402690e6fb7643778122af48d8c5454e3ca4ee8"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/java/com/google/cloud/tools/jib/maven/MavenSettingsServerCredentialsTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}} +{"query": "What is the format of the returned AuthProperty when inferring credentials?", "answer": "When inferAuth successfully finds credentials for a server, it returns an Optional containing an AuthProperty object. Based on the tests, the AuthProperty contains a username and password, accessed via the getUsername() and getPassword() methods. 
For example:\n\nOptional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\nAssert.assertTrue(auth.isPresent());\nAssert.assertEquals(\"simpleUser\", auth.get().getUsername());\nAssert.assertEquals(\"password2\", auth.get().getPassword());\n", "golden_doc_uuids": ["26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08"], "golden_chunk_uuids": [["26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", 4]], "golden_documents": [{"uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. */\npublic class MavenSettingsServerCredentialsTest {\n\n private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", 
auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. Should work as well.\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/java/com/google/cloud/tools/jib/maven/MavenSettingsServerCredentialsTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "334768cc2980845386a3f3d2ebd5e9e0d5a70dede540695dbbf5a3ddda0a8599"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 1, "content": "package com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. 
*/\npublic class MavenSettingsServerCredentialsTest {\n\n", "meta": {"hash_id": "8b15dcfd6a515857cde2f2ad7402690e6fb7643778122af48d8c5454e3ca4ee8"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 2, "content": " private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n", "meta": {"hash_id": "dd8bf1b5e1afacb77c9eeda5177043a7df9b27bca265736a78a8c8c149093344"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 3, "content": " @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n", "meta": {"hash_id": "3c3dc92029b13cd58097b80e589cb38af1b49483c985959ea2e3fca1dd55b0ac"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 4, "content": " @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n", "meta": {"hash_id": "ae601246fc7dfa62aba97afe01a03acada57908ff4c30f1d230926717663395c"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 5, "content": " @Test\n public void testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n", 
"meta": {"hash_id": "ef8ef9af68497ec93a3233df0c02380c89c4f4bd9b3e4d2978e16896a54c9330"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 6, "content": " @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n", "meta": {"hash_id": "8a878c3d7fce5df1b85c96238ee874812f8cb27a11065494120a34036a0917dc"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 7, "content": " @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. Should work as well.\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n", "meta": {"hash_id": "486ad1839398a0e8bd6d2b514b6c960f0b8ae79075b291a7ba469512f1c6a390"}}]}], "golden_chunks": [{"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 4, "content": " @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n", "meta": {"hash_id": "ae601246fc7dfa62aba97afe01a03acada57908ff4c30f1d230926717663395c"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/java/com/google/cloud/tools/jib/maven/MavenSettingsServerCredentialsTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}} +{"query": "How are the test settings files used in the tests?", "answer": "The test class uses several XML files located in the src/test/resources/maven/settings directory as test inputs. These include:\n\n- settings.xml: A Maven settings file, passed to the MavenSettingsServerCredentials constructor. 
\n- settings-security.xml: A settings security file used to create a SettingsDecrypter, also passed to the constructor.\n- settings-security.empty.xml: An empty settings security file used for tests that don't require a master password.\n\nThe paths to these files are defined as member variables in the test class, for example:\n\nprivate Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\nprivate Path testSettingsSecurity = Paths.get(\"src/test/resources/maven/settings/settings-security.xml\"); \n", "golden_doc_uuids": ["26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08"], "golden_chunk_uuids": [["26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", 2]], "golden_documents": [{"uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. 
*/\npublic class MavenSettingsServerCredentialsTest {\n\n private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. 
Should work as well.\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/java/com/google/cloud/tools/jib/maven/MavenSettingsServerCredentialsTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "334768cc2980845386a3f3d2ebd5e9e0d5a70dede540695dbbf5a3ddda0a8599"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 1, "content": "package com.google.cloud.tools.jib.maven;\n\nimport com.google.cloud.tools.jib.plugins.common.AuthProperty;\nimport com.google.cloud.tools.jib.plugins.common.InferredAuthException;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.Optional;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\n\n/** Tests for {@link MavenSettingsServerCredentials}. 
*/\npublic class MavenSettingsServerCredentialsTest {\n\n", "meta": {"hash_id": "8b15dcfd6a515857cde2f2ad7402690e6fb7643778122af48d8c5454e3ca4ee8"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 2, "content": " private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n", "meta": {"hash_id": "dd8bf1b5e1afacb77c9eeda5177043a7df9b27bca265736a78a8c8c149093344"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 3, "content": " @Before\n public void setUp() {\n mavenSettingsServerCredentials =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurity));\n mavenSettingsServerCredentialsNoMasterPassword =\n new MavenSettingsServerCredentials(\n SettingsFixture.newSettings(testSettings),\n SettingsFixture.newSettingsDecrypter(testSettingsSecurityEmpty));\n }\n\n @Test\n public void testInferredAuth_decrypterFailure() {\n try {\n mavenSettingsServerCredentials.inferAuth(\"badServer\");\n Assert.fail();\n } catch (InferredAuthException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.startsWith(\"Unable to decrypt server(badServer) info from settings.xml:\"));\n }\n }\n\n", "meta": {"hash_id": "3c3dc92029b13cd58097b80e589cb38af1b49483c985959ea2e3fca1dd55b0ac"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 4, "content": " @Test\n public void testInferredAuth_successEncrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"encryptedServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"encryptedUser\", auth.get().getUsername());\n Assert.assertEquals(\"password1\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_successUnencrypted() throws InferredAuthException {\n Optional auth = mavenSettingsServerCredentials.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n", "meta": {"hash_id": "ae601246fc7dfa62aba97afe01a03acada57908ff4c30f1d230926717663395c"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 5, "content": " @Test\n public void testInferredAuth_successNoPasswordDoesNotBlowUp() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"simpleServer\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"simpleUser\", auth.get().getUsername());\n Assert.assertEquals(\"password2\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_registryWithHostAndPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:8080\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n", 
"meta": {"hash_id": "ef8ef9af68497ec93a3233df0c02380c89c4f4bd9b3e4d2978e16896a54c9330"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 6, "content": " @Test\n public void testInferredAuth_registryWithHostWithoutPort() throws InferredAuthException {\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n", "meta": {"hash_id": "8a878c3d7fce5df1b85c96238ee874812f8cb27a11065494120a34036a0917dc"}}, {"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 7, "content": " @Test\n public void testInferredAuth_registrySettingsWithPort() throws InferredAuthException {\n // Attempt to resolve WITHOUT the port. Should work as well.\n Optional auth =\n mavenSettingsServerCredentialsNoMasterPassword.inferAuth(\"docker.example.com:5432\");\n Assert.assertTrue(auth.isPresent());\n Assert.assertEquals(\"registryUser\", auth.get().getUsername());\n Assert.assertEquals(\"registryPassword\", auth.get().getPassword());\n }\n\n @Test\n public void testInferredAuth_notFound() throws InferredAuthException {\n Assert.assertFalse(mavenSettingsServerCredentials.inferAuth(\"serverUnknown\").isPresent());\n }\n}\n", "meta": {"hash_id": "486ad1839398a0e8bd6d2b514b6c960f0b8ae79075b291a7ba469512f1c6a390"}}]}], "golden_chunks": [{"doc_uuid": "26dc2c78337411bfcc55ab9e71922005456242e97463ace852147e88984a5f08", "index": 2, "content": " private MavenSettingsServerCredentials mavenSettingsServerCredentialsNoMasterPassword;\n private MavenSettingsServerCredentials mavenSettingsServerCredentials;\n private Path testSettings = Paths.get(\"src/test/resources/maven/settings/settings.xml\");\n private Path testSettingsSecurity =\n Paths.get(\"src/test/resources/maven/settings/settings-security.xml\");\n private Path testSettingsSecurityEmpty =\n Paths.get(\"src/test/resources/maven/settings/settings-security.empty.xml\");\n\n", "meta": {"hash_id": "dd8bf1b5e1afacb77c9eeda5177043a7df9b27bca265736a78a8c8c149093344"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/test/java/com/google/cloud/tools/jib/maven/MavenSettingsServerCredentialsTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 8, "source": "codebases/jsonl"}} +{"query": "How does the testPull() method verify the correctness of the pulled BLOB?", "answer": "The testPull() method verifies the correctness of the pulled BLOB by comparing its digest with the expected digest. It pulls a layer BLOB of the \"distroless/base\" image using the RegistryClient, and then compares the digest of the pulled BLOB with the realDigest obtained from the image's manifest. The assertion Assert.assertEquals(realDigest, pulledBlob.writeTo(ByteStreams.nullOutputStream()).getDigest()) ensures that the digests match.", "golden_doc_uuids": ["e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05"], "golden_chunk_uuids": [["e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", 3]], "golden_documents": [{"uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. 
You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.registry;\n\nimport com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.blob.Blob;\nimport com.google.cloud.tools.jib.event.EventHandlers;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.image.json.V22ManifestTemplate;\nimport com.google.common.io.ByteStreams;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.concurrent.atomic.LongAdder;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Test;\n\n/** Integration tests for {@link BlobPuller}. */\npublic class BlobPullerIntegrationTest {\n\n private final FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n\n @Test\n public void testPull() throws IOException, RegistryException {\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n V22ManifestTemplate manifestTemplate =\n registryClient\n .pullManifest(\n ManifestPullerIntegrationTest.KNOWN_MANIFEST_V22_SHA, V22ManifestTemplate.class)\n .getManifest();\n\n DescriptorDigest realDigest = manifestTemplate.getLayers().get(0).getDigest();\n\n // Pulls a layer BLOB of the distroless/base image.\n LongAdder totalByteCount = new LongAdder();\n LongAdder expectedSize = new LongAdder();\n Blob pulledBlob =\n registryClient.pullBlob(\n realDigest,\n size -> {\n Assert.assertEquals(0, expectedSize.sum());\n expectedSize.add(size);\n },\n totalByteCount::add);\n Assert.assertEquals(realDigest, pulledBlob.writeTo(ByteStreams.nullOutputStream()).getDigest());\n Assert.assertTrue(expectedSize.sum() > 0);\n Assert.assertEquals(expectedSize.sum(), totalByteCount.sum());\n }\n\n @Test\n public void testPull_unknownBlob() throws IOException, DigestException {\n DescriptorDigest nonexistentDigest =\n DescriptorDigest.fromHash(\n \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\");\n\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n\n try {\n registryClient\n .pullBlob(nonexistentDigest, ignored -> {}, ignored -> {})\n .writeTo(ByteStreams.nullOutputStream());\n Assert.fail(\"Trying to pull nonexistent blob should have errored\");\n\n } catch (IOException ex) {\n if (!(ex.getCause() instanceof RegistryErrorException)) {\n throw ex;\n }\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"pull BLOB for gcr.io/distroless/base with digest \" + nonexistentDigest));\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/integration-test/java/com/google/cloud/tools/jib/registry/BlobPullerIntegrationTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 0, "content": "/*\n * Copyright 2018 
Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.registry;\n\n", "meta": {"hash_id": "e09e8d938ae1df316acb9044bd69f1c15a0ddbe3add044506066b8d1ba64149f"}}, {"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 1, "content": "import com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.blob.Blob;\nimport com.google.cloud.tools.jib.event.EventHandlers;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.image.json.V22ManifestTemplate;\nimport com.google.common.io.ByteStreams;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.concurrent.atomic.LongAdder;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Test;\n\n", "meta": {"hash_id": "9480ccde5106e263a7589e7843f661afcf6b771a9e69c9f044fbe35de827f06e"}}, {"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 2, "content": "/** Integration tests for {@link BlobPuller}. */\npublic class BlobPullerIntegrationTest {\n\n private final FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n\n @Test\n public void testPull() throws IOException, RegistryException {\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n V22ManifestTemplate manifestTemplate =\n registryClient\n .pullManifest(\n ManifestPullerIntegrationTest.KNOWN_MANIFEST_V22_SHA, V22ManifestTemplate.class)\n .getManifest();\n\n DescriptorDigest realDigest = manifestTemplate.getLayers().get(0).getDigest();\n\n", "meta": {"hash_id": "205b47f86f68014c9324d404256114dacf45a4e7fc193fa777b6fadd7a76fa47"}}, {"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 3, "content": " // Pulls a layer BLOB of the distroless/base image.\n LongAdder totalByteCount = new LongAdder();\n LongAdder expectedSize = new LongAdder();\n Blob pulledBlob =\n registryClient.pullBlob(\n realDigest,\n size -> {\n Assert.assertEquals(0, expectedSize.sum());\n expectedSize.add(size);\n },\n totalByteCount::add);\n Assert.assertEquals(realDigest, pulledBlob.writeTo(ByteStreams.nullOutputStream()).getDigest());\n Assert.assertTrue(expectedSize.sum() > 0);\n Assert.assertEquals(expectedSize.sum(), totalByteCount.sum());\n }\n\n", "meta": {"hash_id": "77547001ddbce5be971da9b90f719759971a471d61ce4c71022652f86fb66c2b"}}, {"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 4, "content": " @Test\n public void testPull_unknownBlob() throws IOException, DigestException {\n DescriptorDigest nonexistentDigest =\n DescriptorDigest.fromHash(\n \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\");\n\n RegistryClient registryClient =\n 
RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n\n try {\n registryClient\n .pullBlob(nonexistentDigest, ignored -> {}, ignored -> {})\n .writeTo(ByteStreams.nullOutputStream());\n Assert.fail(\"Trying to pull nonexistent blob should have errored\");\n\n } catch (IOException ex) {\n if (!(ex.getCause() instanceof RegistryErrorException)) {\n throw ex;\n }\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"pull BLOB for gcr.io/distroless/base with digest \" + nonexistentDigest));\n }\n }\n}\n", "meta": {"hash_id": "698a8d1794ecd41476c7988336c9164f36989c4d6c7239c1496cff469f52ac46"}}]}], "golden_chunks": [{"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 3, "content": " // Pulls a layer BLOB of the distroless/base image.\n LongAdder totalByteCount = new LongAdder();\n LongAdder expectedSize = new LongAdder();\n Blob pulledBlob =\n registryClient.pullBlob(\n realDigest,\n size -> {\n Assert.assertEquals(0, expectedSize.sum());\n expectedSize.add(size);\n },\n totalByteCount::add);\n Assert.assertEquals(realDigest, pulledBlob.writeTo(ByteStreams.nullOutputStream()).getDigest());\n Assert.assertTrue(expectedSize.sum() > 0);\n Assert.assertEquals(expectedSize.sum(), totalByteCount.sum());\n }\n\n", "meta": {"hash_id": "77547001ddbce5be971da9b90f719759971a471d61ce4c71022652f86fb66c2b"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/integration-test/java/com/google/cloud/tools/jib/registry/BlobPullerIntegrationTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "How is the RegistryClient instance created in the test methods?", "answer": "The RegistryClient instance is created using the RegistryClient.factory() method in both test methods. It takes the parameters EventHandlers.NONE for event handling, \"gcr.io\" as the registry, \"distroless/base\" as the repository, and the httpClient instance. The factory method returns a RegistryClient.Factory object, on which the newRegistryClient() method is called to obtain the actual RegistryClient instance.", "golden_doc_uuids": ["e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05"], "golden_chunk_uuids": [["e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", 2]], "golden_documents": [{"uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.registry;\n\nimport com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.blob.Blob;\nimport com.google.cloud.tools.jib.event.EventHandlers;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.image.json.V22ManifestTemplate;\nimport com.google.common.io.ByteStreams;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.concurrent.atomic.LongAdder;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Test;\n\n/** Integration tests for {@link BlobPuller}. */\npublic class BlobPullerIntegrationTest {\n\n private final FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n\n @Test\n public void testPull() throws IOException, RegistryException {\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n V22ManifestTemplate manifestTemplate =\n registryClient\n .pullManifest(\n ManifestPullerIntegrationTest.KNOWN_MANIFEST_V22_SHA, V22ManifestTemplate.class)\n .getManifest();\n\n DescriptorDigest realDigest = manifestTemplate.getLayers().get(0).getDigest();\n\n // Pulls a layer BLOB of the distroless/base image.\n LongAdder totalByteCount = new LongAdder();\n LongAdder expectedSize = new LongAdder();\n Blob pulledBlob =\n registryClient.pullBlob(\n realDigest,\n size -> {\n Assert.assertEquals(0, expectedSize.sum());\n expectedSize.add(size);\n },\n totalByteCount::add);\n Assert.assertEquals(realDigest, pulledBlob.writeTo(ByteStreams.nullOutputStream()).getDigest());\n Assert.assertTrue(expectedSize.sum() > 0);\n Assert.assertEquals(expectedSize.sum(), totalByteCount.sum());\n }\n\n @Test\n public void testPull_unknownBlob() throws IOException, DigestException {\n DescriptorDigest nonexistentDigest =\n DescriptorDigest.fromHash(\n \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\");\n\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n\n try {\n registryClient\n .pullBlob(nonexistentDigest, ignored -> {}, ignored -> {})\n .writeTo(ByteStreams.nullOutputStream());\n Assert.fail(\"Trying to pull nonexistent blob should have errored\");\n\n } catch (IOException ex) {\n if (!(ex.getCause() instanceof RegistryErrorException)) {\n throw ex;\n }\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"pull BLOB for gcr.io/distroless/base with digest \" + nonexistentDigest));\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/integration-test/java/com/google/cloud/tools/jib/registry/BlobPullerIntegrationTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. 
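As a standalone illustration of the two-step factory chaining this answer describes (factory(...) configures, newRegistryClient() builds), here is a minimal sketch. The class and method names mirror the answer, but the implementation is invented for illustration and is not Jib's real API.

public class RegistryClientFactoryDemo {
  static final class Client {
    final String registry;
    final String repository;
    Client(String registry, String repository) {
      this.registry = registry;
      this.repository = repository;
    }
  }

  /** Intermediate object mirroring the RegistryClient.Factory step in the answer. */
  static final class Factory {
    private final String registry;
    private final String repository;
    Factory(String registry, String repository) {
      this.registry = registry;
      this.repository = repository;
    }
    Client newRegistryClient() {
      return new Client(registry, repository);
    }
  }

  static Factory factory(String registry, String repository) {
    return new Factory(registry, repository);
  }

  public static void main(String[] args) {
    Client client = factory("gcr.io", "distroless/base").newRegistryClient();
    System.out.println(client.registry + "/" + client.repository);
  }
}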
You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.registry;\n\n", "meta": {"hash_id": "e09e8d938ae1df316acb9044bd69f1c15a0ddbe3add044506066b8d1ba64149f"}}, {"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 1, "content": "import com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.blob.Blob;\nimport com.google.cloud.tools.jib.event.EventHandlers;\nimport com.google.cloud.tools.jib.http.FailoverHttpClient;\nimport com.google.cloud.tools.jib.image.json.V22ManifestTemplate;\nimport com.google.common.io.ByteStreams;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.concurrent.atomic.LongAdder;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Test;\n\n", "meta": {"hash_id": "9480ccde5106e263a7589e7843f661afcf6b771a9e69c9f044fbe35de827f06e"}}, {"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 2, "content": "/** Integration tests for {@link BlobPuller}. */\npublic class BlobPullerIntegrationTest {\n\n private final FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n\n @Test\n public void testPull() throws IOException, RegistryException {\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n V22ManifestTemplate manifestTemplate =\n registryClient\n .pullManifest(\n ManifestPullerIntegrationTest.KNOWN_MANIFEST_V22_SHA, V22ManifestTemplate.class)\n .getManifest();\n\n DescriptorDigest realDigest = manifestTemplate.getLayers().get(0).getDigest();\n\n", "meta": {"hash_id": "205b47f86f68014c9324d404256114dacf45a4e7fc193fa777b6fadd7a76fa47"}}, {"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 3, "content": " // Pulls a layer BLOB of the distroless/base image.\n LongAdder totalByteCount = new LongAdder();\n LongAdder expectedSize = new LongAdder();\n Blob pulledBlob =\n registryClient.pullBlob(\n realDigest,\n size -> {\n Assert.assertEquals(0, expectedSize.sum());\n expectedSize.add(size);\n },\n totalByteCount::add);\n Assert.assertEquals(realDigest, pulledBlob.writeTo(ByteStreams.nullOutputStream()).getDigest());\n Assert.assertTrue(expectedSize.sum() > 0);\n Assert.assertEquals(expectedSize.sum(), totalByteCount.sum());\n }\n\n", "meta": {"hash_id": "77547001ddbce5be971da9b90f719759971a471d61ce4c71022652f86fb66c2b"}}, {"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 4, "content": " @Test\n public void testPull_unknownBlob() throws IOException, DigestException {\n DescriptorDigest nonexistentDigest =\n DescriptorDigest.fromHash(\n \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\");\n\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n\n try {\n registryClient\n .pullBlob(nonexistentDigest, ignored -> 
{}, ignored -> {})\n .writeTo(ByteStreams.nullOutputStream());\n Assert.fail(\"Trying to pull nonexistent blob should have errored\");\n\n } catch (IOException ex) {\n if (!(ex.getCause() instanceof RegistryErrorException)) {\n throw ex;\n }\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"pull BLOB for gcr.io/distroless/base with digest \" + nonexistentDigest));\n }\n }\n}\n", "meta": {"hash_id": "698a8d1794ecd41476c7988336c9164f36989c4d6c7239c1496cff469f52ac46"}}]}], "golden_chunks": [{"doc_uuid": "e23ba17cf99c628afc86f67b22af62848a96d600c7e29bfb7fb12e5cacea7f05", "index": 2, "content": "/** Integration tests for {@link BlobPuller}. */\npublic class BlobPullerIntegrationTest {\n\n private final FailoverHttpClient httpClient = new FailoverHttpClient(true, false, ignored -> {});\n\n @Test\n public void testPull() throws IOException, RegistryException {\n RegistryClient registryClient =\n RegistryClient.factory(EventHandlers.NONE, \"gcr.io\", \"distroless/base\", httpClient)\n .newRegistryClient();\n V22ManifestTemplate manifestTemplate =\n registryClient\n .pullManifest(\n ManifestPullerIntegrationTest.KNOWN_MANIFEST_V22_SHA, V22ManifestTemplate.class)\n .getManifest();\n\n DescriptorDigest realDigest = manifestTemplate.getLayers().get(0).getDigest();\n\n", "meta": {"hash_id": "205b47f86f68014c9324d404256114dacf45a4e7fc193fa777b6fadd7a76fa47"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/integration-test/java/com/google/cloud/tools/jib/registry/BlobPullerIntegrationTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "How does JibBuildRunner handle a RegistryUnauthorizedException with a 403 Forbidden status code?", "answer": "When a RegistryUnauthorizedException with a 403 Forbidden status code is encountered, JibBuildRunner extracts the image reference from the exception and passes it to HelpfulSuggestions.forHttpStatusCodeForbidden() to get an appropriate error message:\n\nMockito.when(mockHttpResponseException.getStatusCode())\n .thenReturn(HttpStatusCodes.STATUS_CODE_FORBIDDEN);\n...\n} catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forHttpStatusCodeForbidden(\"someregistry/somerepository\"),\n ex.getMessage());\n}\n", "golden_doc_uuids": ["e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0"], "golden_chunk_uuids": [["e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", 9]], "golden_documents": [{"uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
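The pattern in the answer above is: inspect the HTTP status code carried by the exception and translate 403 into a targeted suggestion that names the image reference. A minimal sketch of that translation step follows, with simplified stand-in names rather than Jib's actual HelpfulSuggestions API; the message wording is illustrative.

public class ForbiddenTranslationDemo {
  static final int STATUS_CODE_FORBIDDEN = 403;

  /** Maps an HTTP status code plus image reference to a user-facing suggestion. */
  static String suggestionFor(int statusCode, String imageReference) {
    if (statusCode == STATUS_CODE_FORBIDDEN) {
      return "Access to " + imageReference
          + " was forbidden; check that the account has pull/push permission";
    }
    return "No credentials were defined for " + imageReference;
  }

  public static void main(String[] args) {
    System.out.println(suggestionFor(403, "someregistry/somerepository"));
    System.out.println(suggestionFor(-1, "someregistry/somerepository")); // unknown status
  }
}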
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\nimport com.google.api.client.http.HttpResponseException;\nimport com.google.api.client.http.HttpStatusCodes;\nimport com.google.cloud.tools.jib.api.CacheDirectoryCreationException;\nimport com.google.cloud.tools.jib.api.Containerizer;\nimport com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.ImageReference;\nimport com.google.cloud.tools.jib.api.InsecureRegistryException;\nimport com.google.cloud.tools.jib.api.JibContainer;\nimport com.google.cloud.tools.jib.api.JibContainerBuilder;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.api.RegistryUnauthorizedException;\nimport com.google.cloud.tools.jib.registry.RegistryCredentialsNotSentException;\nimport com.google.common.collect.ImmutableSet;\nimport java.io.IOException;\nimport java.net.UnknownHostException;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport org.apache.http.conn.HttpHostConnectException;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.Mockito;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n/** Tests for {@link JibBuildRunner}. */\n@RunWith(MockitoJUnitRunner.class)\npublic class JibBuildRunnerTest {\n\n private static final HelpfulSuggestions TEST_HELPFUL_SUGGESTIONS =\n new HelpfulSuggestions(\n \"messagePrefix\", \"clearCacheCommand\", \"toConfig\", \"toFlag\", \"buildFile\");\n\n @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Mock private JibContainerBuilder mockJibContainerBuilder;\n @Mock private JibContainer mockJibContainer;\n @Mock private Containerizer mockContainerizer;\n @Mock private RegistryUnauthorizedException mockRegistryUnauthorizedException;\n @Mock private RegistryCredentialsNotSentException mockRegistryCredentialsNotSentException;\n @Mock private HttpResponseException mockHttpResponseException;\n\n private JibBuildRunner testJibBuildRunner;\n\n @Before\n public void setUpMocks() {\n testJibBuildRunner =\n new JibBuildRunner(\n mockJibContainerBuilder,\n mockContainerizer,\n ignored -> {},\n TEST_HELPFUL_SUGGESTIONS,\n \"ignored\",\n \"ignored\");\n }\n\n @Test\n public void testBuildImage_pass()\n throws BuildStepsExecutionException, IOException, CacheDirectoryCreationException {\n JibContainer buildResult = testJibBuildRunner.runBuild();\n Assert.assertNull(buildResult);\n }\n\n @Test\n public void testBuildImage_httpHostConnectException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n HttpHostConnectException mockHttpHostConnectException =\n Mockito.mock(HttpHostConnectException.class);\n Mockito.doThrow(mockHttpHostConnectException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forHttpHostConnect(), ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_unknownHostException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n 
ExecutionException {\n UnknownHostException mockUnknownHostException = Mockito.mock(UnknownHostException.class);\n Mockito.doThrow(mockUnknownHostException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forUnknownHost(), ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_insecureRegistryException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n InsecureRegistryException mockInsecureRegistryException =\n Mockito.mock(InsecureRegistryException.class);\n Mockito.doThrow(mockInsecureRegistryException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forInsecureRegistry(), ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_registryUnauthorizedException_statusCodeForbidden()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode())\n .thenReturn(HttpStatusCodes.STATUS_CODE_FORBIDDEN);\n\n Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forHttpStatusCodeForbidden(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_registryUnauthorizedException_noCredentials()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode()).thenReturn(-1); // Unknown\n\n Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forNoCredentialsDefined(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_registryCredentialsNotSentException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.doThrow(mockRegistryCredentialsNotSentException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forCredentialsNotSent(), ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_other()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n 
ExecutionException {\n Mockito.doThrow(new RegistryException(\"messagePrefix\"))\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.none(), ex.getMessage());\n }\n }\n\n @Test\n public void testBuildImage_writesImageJson() throws Exception {\n final ImageReference targetImageReference = ImageReference.parse(\"gcr.io/distroless/java:11\");\n final String imageId =\n \"sha256:61bb3ec31a47cb730eb58a38bbfa813761a51dca69d10e39c24c3d00a7b2c7a9\";\n final String digest = \"sha256:3f1be7e19129edb202c071a659a4db35280ab2bb1a16f223bfd5d1948657b6fc\";\n final Set tags = ImmutableSet.of(\"latest\", \"0.1.41-69d10e-20200116T101403\");\n\n final Path outputPath = temporaryFolder.newFile(\"jib-image.json\").toPath();\n\n Mockito.when(mockJibContainer.getTargetImage()).thenReturn(targetImageReference);\n Mockito.when(mockJibContainer.getImageId()).thenReturn(DescriptorDigest.fromDigest(imageId));\n Mockito.when(mockJibContainer.getDigest()).thenReturn(DescriptorDigest.fromDigest(digest));\n Mockito.when(mockJibContainer.getTags()).thenReturn(tags);\n Mockito.when(mockJibContainerBuilder.containerize(mockContainerizer))\n .thenReturn(mockJibContainer);\n Mockito.when(mockJibContainer.isImagePushed()).thenReturn(true);\n testJibBuildRunner.writeImageJson(outputPath).runBuild();\n\n final String outputJson = new String(Files.readAllBytes(outputPath), StandardCharsets.UTF_8);\n final ImageMetadataOutput metadataOutput = ImageMetadataOutput.fromJson(outputJson);\n Assert.assertEquals(targetImageReference.toString(), metadataOutput.getImage());\n Assert.assertEquals(imageId, metadataOutput.getImageId());\n Assert.assertEquals(digest, metadataOutput.getImageDigest());\n Assert.assertEquals(tags, ImmutableSet.copyOf(metadataOutput.getTags()));\n Assert.assertTrue(metadataOutput.isImagePushed());\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-plugins-common/src/test/java/com/google/cloud/tools/jib/plugins/common/JibBuildRunnerTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 16, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.plugins.common;\n\n", "meta": {"hash_id": "52f73327edc9b7b963466d9e941576ef3c6c1139d9f1e9a06c858e8b1cd59dc9"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 1, "content": "import com.google.api.client.http.HttpResponseException;\nimport com.google.api.client.http.HttpStatusCodes;\nimport com.google.cloud.tools.jib.api.CacheDirectoryCreationException;\nimport com.google.cloud.tools.jib.api.Containerizer;\nimport com.google.cloud.tools.jib.api.DescriptorDigest;\nimport com.google.cloud.tools.jib.api.ImageReference;\nimport com.google.cloud.tools.jib.api.InsecureRegistryException;\nimport com.google.cloud.tools.jib.api.JibContainer;\nimport com.google.cloud.tools.jib.api.JibContainerBuilder;\nimport com.google.cloud.tools.jib.api.RegistryException;\nimport com.google.cloud.tools.jib.api.RegistryUnauthorizedException;\nimport com.google.cloud.tools.jib.registry.RegistryCredentialsNotSentException;\nimport com.google.common.collect.ImmutableSet;\nimport java.io.IOException;\nimport java.net.UnknownHostException;\n", "meta": {"hash_id": "c57a1b6cfac0068a97b72db784a76caca2124ba0c4a9d5514e158a6829f54789"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 2, "content": "import java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.Set;\nimport java.util.concurrent.ExecutionException;\nimport org.apache.http.conn.HttpHostConnectException;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\nimport org.junit.runner.RunWith;\nimport org.mockito.Mock;\nimport org.mockito.Mockito;\nimport org.mockito.junit.MockitoJUnitRunner;\n\n", "meta": {"hash_id": "b2627e9f50593b0633ce7be596f65bd2f91f8e0b2f7eb0c48e6af60d01129f50"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 3, "content": "/** Tests for {@link JibBuildRunner}. 
*/\n@RunWith(MockitoJUnitRunner.class)\npublic class JibBuildRunnerTest {\n\n private static final HelpfulSuggestions TEST_HELPFUL_SUGGESTIONS =\n new HelpfulSuggestions(\n \"messagePrefix\", \"clearCacheCommand\", \"toConfig\", \"toFlag\", \"buildFile\");\n\n @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();\n\n @Mock private JibContainerBuilder mockJibContainerBuilder;\n @Mock private JibContainer mockJibContainer;\n @Mock private Containerizer mockContainerizer;\n @Mock private RegistryUnauthorizedException mockRegistryUnauthorizedException;\n @Mock private RegistryCredentialsNotSentException mockRegistryCredentialsNotSentException;\n @Mock private HttpResponseException mockHttpResponseException;\n\n", "meta": {"hash_id": "214324bbe4f228bf7096b55e7999e4f601f1fd619cb65d9c7e65b7d1be31e400"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 4, "content": " private JibBuildRunner testJibBuildRunner;\n\n @Before\n public void setUpMocks() {\n testJibBuildRunner =\n new JibBuildRunner(\n mockJibContainerBuilder,\n mockContainerizer,\n ignored -> {},\n TEST_HELPFUL_SUGGESTIONS,\n \"ignored\",\n \"ignored\");\n }\n\n @Test\n public void testBuildImage_pass()\n throws BuildStepsExecutionException, IOException, CacheDirectoryCreationException {\n JibContainer buildResult = testJibBuildRunner.runBuild();\n Assert.assertNull(buildResult);\n }\n\n", "meta": {"hash_id": "d745949b87f5d93c1f21243a22c36441f93c34b9826f71a97526a1fa2fad34ba"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 5, "content": " @Test\n public void testBuildImage_httpHostConnectException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n HttpHostConnectException mockHttpHostConnectException =\n Mockito.mock(HttpHostConnectException.class);\n Mockito.doThrow(mockHttpHostConnectException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forHttpHostConnect(), ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "0bedc3e0205a009015e01c0660065f12a28cb65fac315e82265cad798df547d6"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 6, "content": " @Test\n public void testBuildImage_unknownHostException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n UnknownHostException mockUnknownHostException = Mockito.mock(UnknownHostException.class);\n Mockito.doThrow(mockUnknownHostException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forUnknownHost(), ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "02dfb922e2283c1154ffbf85783bba4145bcb1534265ea35daece39bef1bcb0c"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 7, "content": " @Test\n public void testBuildImage_insecureRegistryException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n InsecureRegistryException mockInsecureRegistryException =\n Mockito.mock(InsecureRegistryException.class);\n Mockito.doThrow(mockInsecureRegistryException)\n 
.when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forInsecureRegistry(), ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "4fef3a34074fc785f0313cf82a6ec695da2b2e91ac4fb60dfd5599e4d01eb311"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 8, "content": " @Test\n public void testBuildImage_registryUnauthorizedException_statusCodeForbidden()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode())\n .thenReturn(HttpStatusCodes.STATUS_CODE_FORBIDDEN);\n\n", "meta": {"hash_id": "506c1e597ec6eb297fdc98158364916506ec8b3327c4f51739f992d2fe45b766"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 9, "content": " Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forHttpStatusCodeForbidden(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "568cf671fd982f74e8ef293eb9b2f94d6c0c3e567b21cbed16129cc4d38361ee"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 10, "content": " @Test\n public void testBuildImage_registryUnauthorizedException_noCredentials()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.when(mockRegistryUnauthorizedException.getHttpResponseException())\n .thenReturn(mockHttpResponseException);\n Mockito.when(mockRegistryUnauthorizedException.getImageReference())\n .thenReturn(\"someregistry/somerepository\");\n Mockito.when(mockHttpResponseException.getStatusCode()).thenReturn(-1); // Unknown\n\n Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forNoCredentialsDefined(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "72a0ac8d10374701f55930439986025cd94c169f71bfc918550e466cc62f75d4"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 11, "content": " @Test\n public void testBuildImage_registryCredentialsNotSentException()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.doThrow(mockRegistryCredentialsNotSentException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forCredentialsNotSent(), ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "77f0e24d4a4aafe960f2f037734bd31c462bddad2022b203c8cc17b8f29aceba"}}, {"doc_uuid": 
"e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 12, "content": " @Test\n public void testBuildImage_other()\n throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,\n ExecutionException {\n Mockito.doThrow(new RegistryException(\"messagePrefix\"))\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.none(), ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "a4868e713e0dd8b5272baf6f66904d5647129e003de1014ddd2e7aca6402f5bf"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 13, "content": " @Test\n public void testBuildImage_writesImageJson() throws Exception {\n final ImageReference targetImageReference = ImageReference.parse(\"gcr.io/distroless/java:11\");\n final String imageId =\n \"sha256:61bb3ec31a47cb730eb58a38bbfa813761a51dca69d10e39c24c3d00a7b2c7a9\";\n final String digest = \"sha256:3f1be7e19129edb202c071a659a4db35280ab2bb1a16f223bfd5d1948657b6fc\";\n final Set tags = ImmutableSet.of(\"latest\", \"0.1.41-69d10e-20200116T101403\");\n\n final Path outputPath = temporaryFolder.newFile(\"jib-image.json\").toPath();\n\n", "meta": {"hash_id": "240012a575ab1d12852fdfb207ca155204be91de74c0525e182a4494beb805f0"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 14, "content": " Mockito.when(mockJibContainer.getTargetImage()).thenReturn(targetImageReference);\n Mockito.when(mockJibContainer.getImageId()).thenReturn(DescriptorDigest.fromDigest(imageId));\n Mockito.when(mockJibContainer.getDigest()).thenReturn(DescriptorDigest.fromDigest(digest));\n Mockito.when(mockJibContainer.getTags()).thenReturn(tags);\n Mockito.when(mockJibContainerBuilder.containerize(mockContainerizer))\n .thenReturn(mockJibContainer);\n Mockito.when(mockJibContainer.isImagePushed()).thenReturn(true);\n testJibBuildRunner.writeImageJson(outputPath).runBuild();\n\n", "meta": {"hash_id": "7a9903be522bc4988a955a5945284594a58d7fd0e717724318912852a2c94dd4"}}, {"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 15, "content": " final String outputJson = new String(Files.readAllBytes(outputPath), StandardCharsets.UTF_8);\n final ImageMetadataOutput metadataOutput = ImageMetadataOutput.fromJson(outputJson);\n Assert.assertEquals(targetImageReference.toString(), metadataOutput.getImage());\n Assert.assertEquals(imageId, metadataOutput.getImageId());\n Assert.assertEquals(digest, metadataOutput.getImageDigest());\n Assert.assertEquals(tags, ImmutableSet.copyOf(metadataOutput.getTags()));\n Assert.assertTrue(metadataOutput.isImagePushed());\n }\n}\n", "meta": {"hash_id": "65ed4cdff8ac995766657fbfcc1fae25d7ee77cc395d5ce330c43a2d3fa95140"}}]}], "golden_chunks": [{"doc_uuid": "e357fbf7acddc26c61f964891486394c176804f31a319d178f3a4c9b41dd47e0", "index": 9, "content": " Mockito.doThrow(mockRegistryUnauthorizedException)\n .when(mockJibContainerBuilder)\n .containerize(mockContainerizer);\n\n try {\n testJibBuildRunner.runBuild();\n Assert.fail();\n\n } catch (BuildStepsExecutionException ex) {\n Assert.assertEquals(\n TEST_HELPFUL_SUGGESTIONS.forHttpStatusCodeForbidden(\"someregistry/somerepository\"),\n ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "568cf671fd982f74e8ef293eb9b2f94d6c0c3e567b21cbed16129cc4d38361ee"}}], "meta": {"doctype": "codebase", "relative_path": 
"/jib-plugins-common/src/test/java/com/google/cloud/tools/jib/plugins/common/JibBuildRunnerTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 16, "source": "codebases/jsonl"}} +{"query": "How does the buildToDockerDaemonAndRun method verify the built image?", "answer": "The buildToDockerDaemonAndRun method first calls buildToDockerDaemon to build the image. It then runs several 'docker inspect' commands to verify the image's volumes, exposed ports, and labels match the expected values, asserting the output contains specific strings. For example:\n\nString dockerInspectVolumes =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Volumes}}'\", imageReference).run();\nMatcherAssert.assertThat(\n dockerInspectVolumes, CoreMatchers.containsString(\"\\\"/var/log\\\":{},\\\"/var/log2\\\":{}\"));\n\nFinally, it runs the built image using 'docker run' and returns the output.", "golden_doc_uuids": ["c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf"], "golden_chunk_uuids": [["c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", 6], ["c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", 4], ["c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", 3], ["c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", 2], ["c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", 1], ["c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", 0]], "golden_documents": [{"uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.maven;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.Command;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.Arrays;\nimport org.apache.maven.it.VerificationException;\nimport org.apache.maven.it.Verifier;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Assume;\nimport org.junit.ClassRule;\nimport org.junit.Test;\n\n/** Integration tests for {@link BuildDockerMojo}. 
*/\npublic class BuildDockerMojoIntegrationTest {\n\n @ClassRule public static final TestProject simpleTestProject = new TestProject(\"simple\");\n\n @ClassRule public static final TestProject emptyTestProject = new TestProject(\"empty\");\n\n @ClassRule\n public static final TestProject defaultTargetTestProject = new TestProject(\"default-target\");\n\n private static void buildToDockerDaemon(TestProject project, String imageReference, String pomXml)\n throws VerificationException, DigestException, IOException {\n Verifier verifier = new Verifier(project.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", imageReference);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=\" + pomXml);\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyErrorFreeLog();\n\n BuildImageMojoIntegrationTest.readDigestFile(\n project.getProjectRoot().resolve(\"target/jib-image.digest\"));\n }\n\n /**\n * Builds and runs jib:buildDocker on a project at {@code projectRoot} pushing to {@code\n * imageReference}.\n */\n private static String buildToDockerDaemonAndRun(TestProject project, String imageReference)\n throws VerificationException, IOException, InterruptedException, DigestException {\n buildToDockerDaemon(project, imageReference, \"pom.xml\");\n\n String dockerInspectVolumes =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Volumes}}'\", imageReference).run();\n String dockerInspectExposedPorts =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.ExposedPorts}}'\", imageReference)\n .run();\n String dockerInspectLabels =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Labels}}'\", imageReference).run();\n String history = new Command(\"docker\", \"history\", imageReference).run();\n\n MatcherAssert.assertThat(\n dockerInspectVolumes, CoreMatchers.containsString(\"\\\"/var/log\\\":{},\\\"/var/log2\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectExposedPorts,\n CoreMatchers.containsString(\n \"\\\"1000/tcp\\\":{},\\\"2000/udp\\\":{},\\\"2001/udp\\\":{},\\\"2002/udp\\\":{},\\\"2003/udp\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectLabels,\n CoreMatchers.containsString(\"\\\"key1\\\":\\\"value1\\\",\\\"key2\\\":\\\"value2\\\"\"));\n\n return new Command(\"docker\", \"run\", \"--rm\", imageReference).run();\n }\n\n @Test\n public void testExecute_simple()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n Assert.assertEquals(\n \"Hello, world. 
An argument.\\n1970-01-01T00:00:01Z\\nrw-r--r--\\nrw-r--r--\\nfoo\\ncat\\n\"\n + \"1970-01-01T00:00:01Z\\n1970-01-01T00:00:01Z\\n\",\n buildToDockerDaemonAndRun(simpleTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_simple_extraDirectoriesFiltering()\n throws DigestException, IOException, InterruptedException, VerificationException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-extra-dirs-filtering.xml\");\n String output =\n new Command(\"docker\", \"run\", \"--rm\", \"--entrypoint=ls\", targetImage, \"-1R\", \"/extras\")\n .run();\n\n // /extras/cat.txt\n // /extras/foo\n // /extras/sub/\n // /extras/sub/a.json\n assertThat(output).isEqualTo(\"/extras:\\ncat.txt\\nfoo\\nsub\\n\\n/extras/sub:\\na.json\\n\");\n }\n\n @Test\n public void testExecute_dockerClient()\n throws VerificationException, IOException, InterruptedException {\n Assume.assumeFalse(System.getProperty(\"os.name\").startsWith(\"Windows\"));\n new Command(\n \"chmod\", \"+x\", simpleTestProject.getProjectRoot().resolve(\"mock-docker.sh\").toString())\n .run();\n\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-dockerclient.xml\");\n verifier.addCliOption(\"--debug\");\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyTextInLog(\"Docker load called. value1 value2\");\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_empty()\n throws InterruptedException, IOException, VerificationException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n\n Assert.assertEquals(\"\", buildToDockerDaemonAndRun(emptyTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_defaultTarget()\n throws VerificationException, IOException, InterruptedException, DigestException {\n Assert.assertEquals(\n \"Hello, world. 
An argument.\\n\",\n buildToDockerDaemonAndRun(\n defaultTargetTestProject, \"default-target-name:default-target-version\"));\n }\n\n @Test\n public void testExecute_jibSkip() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibSkip(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_jibContainerizeSkips() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibContainerizeSkips(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_userNumeric()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom.xml\");\n Assert.assertEquals(\n \"12345:54321\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_userNames()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"brokenuserimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom-broken-user.xml\");\n Assert.assertEquals(\n \"myuser:mygroup\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_noToImageAndInvalidProjectName()\n throws DigestException, VerificationException, IOException, InterruptedException {\n buildToDockerDaemon(simpleTestProject, \"image reference ignored\", \"pom-no-to-image.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", \"my-artifact-id:1\").run());\n }\n\n @Test\n public void testExecute_jarContainerization()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"jarcontainerizationimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-jar-containerization.xml\");\n Assert.assertEquals(\n \"Hello, world. \\nImplementation-Title: hello-world\\nImplementation-Version: 1\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testExecute_jarContainerizationOnMissingJar() throws IOException {\n try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"_TARGET_IMAGE\", \"erroronmissingjar\");\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-jar-containerization.xml\");\n verifier.executeGoals(Arrays.asList(\"clean\", \"jib:dockerBuild\"));\n Assert.fail();\n\n } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"Obtaining project build output files failed; make sure you have packaged your \"\n + \"project before trying to build the image. 
(Did you accidentally run \\\"mvn \"\n + \"clean jib:build\\\" instead of \\\"mvn clean package jib:build\\\"?)\"));\n }\n }\n\n @Test\n public void testExecute_jibRequireVersion_ok() throws VerificationException, IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n // this plugin should match 1.0\n verifier.setSystemProperty(\"jib.requiredVersion\", \"1.0\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_jibRequireVersion_fail() throws IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.requiredVersion\", \"[,1.0]\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n Assert.fail();\n } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(), CoreMatchers.containsString(\"but is required to be [,1.0]\"));\n }\n }\n\n @Test\n public void testCredHelperConfigurationSimple()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-1.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testCredHelperConfigurationComplex()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-2.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/integration-test/java/com/google/cloud/tools/jib/maven/BuildDockerMojoIntegrationTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 16, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "d955a82ddb22ea6f2aa7a2f44dd574cd96125a3f2e6405ecf3034f0f08f4b95d"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 1, "content": "package com.google.cloud.tools.jib.maven;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.Command;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.Arrays;\nimport org.apache.maven.it.VerificationException;\nimport org.apache.maven.it.Verifier;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Assume;\nimport org.junit.ClassRule;\nimport org.junit.Test;\n\n/** Integration tests for {@link BuildDockerMojo}. */\npublic class BuildDockerMojoIntegrationTest {\n\n @ClassRule public static final TestProject simpleTestProject = new TestProject(\"simple\");\n\n @ClassRule public static final TestProject emptyTestProject = new TestProject(\"empty\");\n\n @ClassRule\n public static final TestProject defaultTargetTestProject = new TestProject(\"default-target\");\n\n", "meta": {"hash_id": "9d57c9b853ed16f596faff68f88d8da8c0d8434c44859f029d453c39a4f5b9d2"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 2, "content": " private static void buildToDockerDaemon(TestProject project, String imageReference, String pomXml)\n throws VerificationException, DigestException, IOException {\n Verifier verifier = new Verifier(project.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", imageReference);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=\" + pomXml);\n verifier.executeGoal(\"package\");\n\n", "meta": {"hash_id": "7d27a616545c2f33ed65143ff8e0f1d48bc8831d37a78eb19203652b166f77f4"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 3, "content": " verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyErrorFreeLog();\n\n BuildImageMojoIntegrationTest.readDigestFile(\n project.getProjectRoot().resolve(\"target/jib-image.digest\"));\n }\n\n /**\n * Builds and runs jib:buildDocker on a project at {@code projectRoot} pushing to {@code\n * imageReference}.\n */\n private static String buildToDockerDaemonAndRun(TestProject project, String imageReference)\n throws VerificationException, IOException, InterruptedException, DigestException {\n buildToDockerDaemon(project, imageReference, \"pom.xml\");\n\n", "meta": {"hash_id": "a0c9f163cbc43da3ee6293bb5f4a8ae3cba44693c3191fa4e1320e35435ff982"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 4, "content": " String dockerInspectVolumes =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Volumes}}'\", imageReference).run();\n String dockerInspectExposedPorts =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.ExposedPorts}}'\", imageReference)\n .run();\n String dockerInspectLabels =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Labels}}'\", imageReference).run();\n String history = new Command(\"docker\", \"history\", imageReference).run();\n\n MatcherAssert.assertThat(\n dockerInspectVolumes, CoreMatchers.containsString(\"\\\"/var/log\\\":{},\\\"/var/log2\\\":{}\"));\n MatcherAssert.assertThat(\n 
dockerInspectExposedPorts,\n CoreMatchers.containsString(\n \"\\\"1000/tcp\\\":{},\\\"2000/udp\\\":{},\\\"2001/udp\\\":{},\\\"2002/udp\\\":{},\\\"2003/udp\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectLabels,\n CoreMatchers.containsString(\"\\\"key1\\\":\\\"value1\\\",\\\"key2\\\":\\\"value2\\\"\"));\n\n", "meta": {"hash_id": "e4c7ab3dbf7c6b83b14f9cc37e44b3713d6b6c86b489dacdfd3f2161960aa01c"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 5, "content": " return new Command(\"docker\", \"run\", \"--rm\", imageReference).run();\n }\n\n @Test\n public void testExecute_simple()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n Assert.assertEquals(\n \"Hello, world. An argument.\\n1970-01-01T00:00:01Z\\nrw-r--r--\\nrw-r--r--\\nfoo\\ncat\\n\"\n + \"1970-01-01T00:00:01Z\\n1970-01-01T00:00:01Z\\n\",\n buildToDockerDaemonAndRun(simpleTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n", "meta": {"hash_id": "68e7f24cb180a2e46dfae279608f5a93e40f6fd3e3a29ed50d15a54cbd0b2a5d"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 6, "content": " @Test\n public void testExecute_simple_extraDirectoriesFiltering()\n throws DigestException, IOException, InterruptedException, VerificationException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-extra-dirs-filtering.xml\");\n String output =\n new Command(\"docker\", \"run\", \"--rm\", \"--entrypoint=ls\", targetImage, \"-1R\", \"/extras\")\n .run();\n\n // /extras/cat.txt\n // /extras/foo\n // /extras/sub/\n // /extras/sub/a.json\n assertThat(output).isEqualTo(\"/extras:\\ncat.txt\\nfoo\\nsub\\n\\n/extras/sub:\\na.json\\n\");\n }\n\n @Test\n public void testExecute_dockerClient()\n throws VerificationException, IOException, InterruptedException {\n Assume.assumeFalse(System.getProperty(\"os.name\").startsWith(\"Windows\"));\n new Command(\n \"chmod\", \"+x\", simpleTestProject.getProjectRoot().resolve(\"mock-docker.sh\").toString())\n .run();\n\n", "meta": {"hash_id": "ede9fc9b519970c378efeeb310ee5a6a6a6229e91b5df28ad6e56a9c2c052397"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 7, "content": " String targetImage = \"simpleimage:maven\" + System.nanoTime();\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-dockerclient.xml\");\n verifier.addCliOption(\"--debug\");\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyTextInLog(\"Docker load called. 
value1 value2\");\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_empty()\n throws InterruptedException, IOException, VerificationException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n\n", "meta": {"hash_id": "63fbf5da149ca4a2716552a9fed5491174b12b01d5a557782a3eacf8bde26066"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 8, "content": " Assert.assertEquals(\"\", buildToDockerDaemonAndRun(emptyTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_defaultTarget()\n throws VerificationException, IOException, InterruptedException, DigestException {\n Assert.assertEquals(\n \"Hello, world. An argument.\\n\",\n buildToDockerDaemonAndRun(\n defaultTargetTestProject, \"default-target-name:default-target-version\"));\n }\n\n", "meta": {"hash_id": "6ba47a5b7e142a34e32f1358c9d9719db0a90bae5b1ae90ceea9141677b3f43c"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 9, "content": " @Test\n public void testExecute_jibSkip() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibSkip(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_jibContainerizeSkips() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibContainerizeSkips(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_userNumeric()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom.xml\");\n Assert.assertEquals(\n \"12345:54321\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n", "meta": {"hash_id": "71cc98df471c9a5486a4acc2f1a256b31872b15885a16861626aef47fba518ce"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 10, "content": " @Test\n public void testExecute_userNames()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"brokenuserimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom-broken-user.xml\");\n Assert.assertEquals(\n \"myuser:mygroup\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_noToImageAndInvalidProjectName()\n throws DigestException, VerificationException, IOException, InterruptedException {\n buildToDockerDaemon(simpleTestProject, \"image reference ignored\", \"pom-no-to-image.xml\");\n Assert.assertEquals(\n \"Hello, world. 
\\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", \"my-artifact-id:1\").run());\n }\n\n", "meta": {"hash_id": "13d8ab306a83ed5f27a603e0c8225b4c1c820713b5228448c805cb7c67b8dc65"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 11, "content": " @Test\n public void testExecute_jarContainerization()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"jarcontainerizationimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-jar-containerization.xml\");\n Assert.assertEquals(\n \"Hello, world. \\nImplementation-Title: hello-world\\nImplementation-Version: 1\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testExecute_jarContainerizationOnMissingJar() throws IOException {\n try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"_TARGET_IMAGE\", \"erroronmissingjar\");\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-jar-containerization.xml\");\n verifier.executeGoals(Arrays.asList(\"clean\", \"jib:dockerBuild\"));\n Assert.fail();\n\n", "meta": {"hash_id": "6e2405d2af7f862d899efe10886b85cad36c495a85d4e502096881168992437b"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 12, "content": " } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"Obtaining project build output files failed; make sure you have packaged your \"\n + \"project before trying to build the image. (Did you accidentally run \\\"mvn \"\n + \"clean jib:build\\\" instead of \\\"mvn clean package jib:build\\\"?)\"));\n }\n }\n\n @Test\n public void testExecute_jibRequireVersion_ok() throws VerificationException, IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n", "meta": {"hash_id": "e196962f9e9cb84084a175832c0ecd7e779d248adf73277e35481d515bb17b92"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 13, "content": " Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n // this plugin should match 1.0\n verifier.setSystemProperty(\"jib.requiredVersion\", \"1.0\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_jibRequireVersion_fail() throws IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n", "meta": {"hash_id": "e1b839f589c146dc2d1b0c5bc9c5546079bd2153a88e7bc762cca1a1a1edcaa6"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 14, "content": " try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.requiredVersion\", \"[,1.0]\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n Assert.fail();\n } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(), CoreMatchers.containsString(\"but is required to be [,1.0]\"));\n }\n }\n\n", "meta": {"hash_id": "49f622eb6c52e78fb880adeb274fc8ca8c25c4238f65fa0611e946bc1d16e626"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 15, 
"content": " @Test\n public void testCredHelperConfigurationSimple()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-1.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testCredHelperConfigurationComplex()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-2.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n}\n", "meta": {"hash_id": "5831ec9184845cf9239c2f3d3ab8f7c65605595c1fb4b58903ac77b33d527076"}}]}], "golden_chunks": [{"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 6, "content": " @Test\n public void testExecute_simple_extraDirectoriesFiltering()\n throws DigestException, IOException, InterruptedException, VerificationException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-extra-dirs-filtering.xml\");\n String output =\n new Command(\"docker\", \"run\", \"--rm\", \"--entrypoint=ls\", targetImage, \"-1R\", \"/extras\")\n .run();\n\n // /extras/cat.txt\n // /extras/foo\n // /extras/sub/\n // /extras/sub/a.json\n assertThat(output).isEqualTo(\"/extras:\\ncat.txt\\nfoo\\nsub\\n\\n/extras/sub:\\na.json\\n\");\n }\n\n @Test\n public void testExecute_dockerClient()\n throws VerificationException, IOException, InterruptedException {\n Assume.assumeFalse(System.getProperty(\"os.name\").startsWith(\"Windows\"));\n new Command(\n \"chmod\", \"+x\", simpleTestProject.getProjectRoot().resolve(\"mock-docker.sh\").toString())\n .run();\n\n", "meta": {"hash_id": "ede9fc9b519970c378efeeb310ee5a6a6a6229e91b5df28ad6e56a9c2c052397"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 4, "content": " String dockerInspectVolumes =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Volumes}}'\", imageReference).run();\n String dockerInspectExposedPorts =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.ExposedPorts}}'\", imageReference)\n .run();\n String dockerInspectLabels =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Labels}}'\", imageReference).run();\n String history = new Command(\"docker\", \"history\", imageReference).run();\n\n MatcherAssert.assertThat(\n dockerInspectVolumes, CoreMatchers.containsString(\"\\\"/var/log\\\":{},\\\"/var/log2\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectExposedPorts,\n CoreMatchers.containsString(\n \"\\\"1000/tcp\\\":{},\\\"2000/udp\\\":{},\\\"2001/udp\\\":{},\\\"2002/udp\\\":{},\\\"2003/udp\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectLabels,\n CoreMatchers.containsString(\"\\\"key1\\\":\\\"value1\\\",\\\"key2\\\":\\\"value2\\\"\"));\n\n", "meta": {"hash_id": "e4c7ab3dbf7c6b83b14f9cc37e44b3713d6b6c86b489dacdfd3f2161960aa01c"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 3, "content": " verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyErrorFreeLog();\n\n BuildImageMojoIntegrationTest.readDigestFile(\n 
project.getProjectRoot().resolve(\"target/jib-image.digest\"));\n }\n\n /**\n * Builds and runs jib:buildDocker on a project at {@code projectRoot} pushing to {@code\n * imageReference}.\n */\n private static String buildToDockerDaemonAndRun(TestProject project, String imageReference)\n throws VerificationException, IOException, InterruptedException, DigestException {\n buildToDockerDaemon(project, imageReference, \"pom.xml\");\n\n", "meta": {"hash_id": "a0c9f163cbc43da3ee6293bb5f4a8ae3cba44693c3191fa4e1320e35435ff982"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 2, "content": " private static void buildToDockerDaemon(TestProject project, String imageReference, String pomXml)\n throws VerificationException, DigestException, IOException {\n Verifier verifier = new Verifier(project.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", imageReference);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=\" + pomXml);\n verifier.executeGoal(\"package\");\n\n", "meta": {"hash_id": "7d27a616545c2f33ed65143ff8e0f1d48bc8831d37a78eb19203652b166f77f4"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 1, "content": "package com.google.cloud.tools.jib.maven;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.Command;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.Arrays;\nimport org.apache.maven.it.VerificationException;\nimport org.apache.maven.it.Verifier;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Assume;\nimport org.junit.ClassRule;\nimport org.junit.Test;\n\n/** Integration tests for {@link BuildDockerMojo}. */\npublic class BuildDockerMojoIntegrationTest {\n\n @ClassRule public static final TestProject simpleTestProject = new TestProject(\"simple\");\n\n @ClassRule public static final TestProject emptyTestProject = new TestProject(\"empty\");\n\n @ClassRule\n public static final TestProject defaultTargetTestProject = new TestProject(\"default-target\");\n\n", "meta": {"hash_id": "9d57c9b853ed16f596faff68f88d8da8c0d8434c44859f029d453c39a4f5b9d2"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "d955a82ddb22ea6f2aa7a2f44dd574cd96125a3f2e6405ecf3034f0f08f4b95d"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/integration-test/java/com/google/cloud/tools/jib/maven/BuildDockerMojoIntegrationTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 16, "source": "codebases/jsonl"}}
+{"query": "How does the testExecute_dockerClient test work?", "answer": "The testExecute_dockerClient test verifies that Jib can build an image using an external Docker client, as opposed to the usual Docker daemon. It does this by:\n\n1. Making the mock 'mock-docker.sh' script executable\n2. Configuring the Verifier with a 'pom-dockerclient.xml' pom file\n3. Executing the 'package' and 'jib:dockerBuild' goals\n4. Verifying the expected \"Docker load called. value1 value2\" string is logged, indicating the external Docker client was invoked\n\nThe actual external Docker client is simulated by the 'mock-docker.sh' script in this test.", "golden_doc_uuids": ["c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf"], "golden_chunk_uuids": [["c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", 7]], "golden_documents": [{"uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
*/\npublic class BuildDockerMojoIntegrationTest {\n\n @ClassRule public static final TestProject simpleTestProject = new TestProject(\"simple\");\n\n @ClassRule public static final TestProject emptyTestProject = new TestProject(\"empty\");\n\n @ClassRule\n public static final TestProject defaultTargetTestProject = new TestProject(\"default-target\");\n\n private static void buildToDockerDaemon(TestProject project, String imageReference, String pomXml)\n throws VerificationException, DigestException, IOException {\n Verifier verifier = new Verifier(project.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", imageReference);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=\" + pomXml);\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyErrorFreeLog();\n\n BuildImageMojoIntegrationTest.readDigestFile(\n project.getProjectRoot().resolve(\"target/jib-image.digest\"));\n }\n\n /**\n * Builds and runs jib:buildDocker on a project at {@code projectRoot} pushing to {@code\n * imageReference}.\n */\n private static String buildToDockerDaemonAndRun(TestProject project, String imageReference)\n throws VerificationException, IOException, InterruptedException, DigestException {\n buildToDockerDaemon(project, imageReference, \"pom.xml\");\n\n String dockerInspectVolumes =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Volumes}}'\", imageReference).run();\n String dockerInspectExposedPorts =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.ExposedPorts}}'\", imageReference)\n .run();\n String dockerInspectLabels =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Labels}}'\", imageReference).run();\n String history = new Command(\"docker\", \"history\", imageReference).run();\n\n MatcherAssert.assertThat(\n dockerInspectVolumes, CoreMatchers.containsString(\"\\\"/var/log\\\":{},\\\"/var/log2\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectExposedPorts,\n CoreMatchers.containsString(\n \"\\\"1000/tcp\\\":{},\\\"2000/udp\\\":{},\\\"2001/udp\\\":{},\\\"2002/udp\\\":{},\\\"2003/udp\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectLabels,\n CoreMatchers.containsString(\"\\\"key1\\\":\\\"value1\\\",\\\"key2\\\":\\\"value2\\\"\"));\n\n return new Command(\"docker\", \"run\", \"--rm\", imageReference).run();\n }\n\n @Test\n public void testExecute_simple()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n Assert.assertEquals(\n \"Hello, world. 
An argument.\\n1970-01-01T00:00:01Z\\nrw-r--r--\\nrw-r--r--\\nfoo\\ncat\\n\"\n + \"1970-01-01T00:00:01Z\\n1970-01-01T00:00:01Z\\n\",\n buildToDockerDaemonAndRun(simpleTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_simple_extraDirectoriesFiltering()\n throws DigestException, IOException, InterruptedException, VerificationException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-extra-dirs-filtering.xml\");\n String output =\n new Command(\"docker\", \"run\", \"--rm\", \"--entrypoint=ls\", targetImage, \"-1R\", \"/extras\")\n .run();\n\n // /extras/cat.txt\n // /extras/foo\n // /extras/sub/\n // /extras/sub/a.json\n assertThat(output).isEqualTo(\"/extras:\\ncat.txt\\nfoo\\nsub\\n\\n/extras/sub:\\na.json\\n\");\n }\n\n @Test\n public void testExecute_dockerClient()\n throws VerificationException, IOException, InterruptedException {\n Assume.assumeFalse(System.getProperty(\"os.name\").startsWith(\"Windows\"));\n new Command(\n \"chmod\", \"+x\", simpleTestProject.getProjectRoot().resolve(\"mock-docker.sh\").toString())\n .run();\n\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-dockerclient.xml\");\n verifier.addCliOption(\"--debug\");\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyTextInLog(\"Docker load called. value1 value2\");\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_empty()\n throws InterruptedException, IOException, VerificationException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n\n Assert.assertEquals(\"\", buildToDockerDaemonAndRun(emptyTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_defaultTarget()\n throws VerificationException, IOException, InterruptedException, DigestException {\n Assert.assertEquals(\n \"Hello, world. 
An argument.\\n\",\n buildToDockerDaemonAndRun(\n defaultTargetTestProject, \"default-target-name:default-target-version\"));\n }\n\n @Test\n public void testExecute_jibSkip() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibSkip(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_jibContainerizeSkips() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibContainerizeSkips(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_userNumeric()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom.xml\");\n Assert.assertEquals(\n \"12345:54321\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_userNames()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"brokenuserimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom-broken-user.xml\");\n Assert.assertEquals(\n \"myuser:mygroup\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_noToImageAndInvalidProjectName()\n throws DigestException, VerificationException, IOException, InterruptedException {\n buildToDockerDaemon(simpleTestProject, \"image reference ignored\", \"pom-no-to-image.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", \"my-artifact-id:1\").run());\n }\n\n @Test\n public void testExecute_jarContainerization()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"jarcontainerizationimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-jar-containerization.xml\");\n Assert.assertEquals(\n \"Hello, world. \\nImplementation-Title: hello-world\\nImplementation-Version: 1\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testExecute_jarContainerizationOnMissingJar() throws IOException {\n try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"_TARGET_IMAGE\", \"erroronmissingjar\");\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-jar-containerization.xml\");\n verifier.executeGoals(Arrays.asList(\"clean\", \"jib:dockerBuild\"));\n Assert.fail();\n\n } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"Obtaining project build output files failed; make sure you have packaged your \"\n + \"project before trying to build the image. 
(Did you accidentally run \\\"mvn \"\n + \"clean jib:build\\\" instead of \\\"mvn clean package jib:build\\\"?)\"));\n }\n }\n\n @Test\n public void testExecute_jibRequireVersion_ok() throws VerificationException, IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n // this plugin should match 1.0\n verifier.setSystemProperty(\"jib.requiredVersion\", \"1.0\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_jibRequireVersion_fail() throws IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.requiredVersion\", \"[,1.0]\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n Assert.fail();\n } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(), CoreMatchers.containsString(\"but is required to be [,1.0]\"));\n }\n }\n\n @Test\n public void testCredHelperConfigurationSimple()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-1.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testCredHelperConfigurationComplex()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-2.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/integration-test/java/com/google/cloud/tools/jib/maven/BuildDockerMojoIntegrationTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 16, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 0, "content": "/*\n * Copyright 2018 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "d955a82ddb22ea6f2aa7a2f44dd574cd96125a3f2e6405ecf3034f0f08f4b95d"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 1, "content": "package com.google.cloud.tools.jib.maven;\n\nimport static com.google.common.truth.Truth.assertThat;\n\nimport com.google.cloud.tools.jib.Command;\nimport java.io.IOException;\nimport java.security.DigestException;\nimport java.util.Arrays;\nimport org.apache.maven.it.VerificationException;\nimport org.apache.maven.it.Verifier;\nimport org.hamcrest.CoreMatchers;\nimport org.hamcrest.MatcherAssert;\nimport org.junit.Assert;\nimport org.junit.Assume;\nimport org.junit.ClassRule;\nimport org.junit.Test;\n\n/** Integration tests for {@link BuildDockerMojo}. */\npublic class BuildDockerMojoIntegrationTest {\n\n @ClassRule public static final TestProject simpleTestProject = new TestProject(\"simple\");\n\n @ClassRule public static final TestProject emptyTestProject = new TestProject(\"empty\");\n\n @ClassRule\n public static final TestProject defaultTargetTestProject = new TestProject(\"default-target\");\n\n", "meta": {"hash_id": "9d57c9b853ed16f596faff68f88d8da8c0d8434c44859f029d453c39a4f5b9d2"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 2, "content": " private static void buildToDockerDaemon(TestProject project, String imageReference, String pomXml)\n throws VerificationException, DigestException, IOException {\n Verifier verifier = new Verifier(project.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", imageReference);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=\" + pomXml);\n verifier.executeGoal(\"package\");\n\n", "meta": {"hash_id": "7d27a616545c2f33ed65143ff8e0f1d48bc8831d37a78eb19203652b166f77f4"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 3, "content": " verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyErrorFreeLog();\n\n BuildImageMojoIntegrationTest.readDigestFile(\n project.getProjectRoot().resolve(\"target/jib-image.digest\"));\n }\n\n /**\n * Builds and runs jib:buildDocker on a project at {@code projectRoot} pushing to {@code\n * imageReference}.\n */\n private static String buildToDockerDaemonAndRun(TestProject project, String imageReference)\n throws VerificationException, IOException, InterruptedException, DigestException {\n buildToDockerDaemon(project, imageReference, \"pom.xml\");\n\n", "meta": {"hash_id": "a0c9f163cbc43da3ee6293bb5f4a8ae3cba44693c3191fa4e1320e35435ff982"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 4, "content": " String dockerInspectVolumes =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Volumes}}'\", imageReference).run();\n String dockerInspectExposedPorts =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.ExposedPorts}}'\", imageReference)\n .run();\n String dockerInspectLabels =\n new Command(\"docker\", \"inspect\", \"-f\", \"'{{json .Config.Labels}}'\", imageReference).run();\n String history = new Command(\"docker\", \"history\", imageReference).run();\n\n MatcherAssert.assertThat(\n dockerInspectVolumes, CoreMatchers.containsString(\"\\\"/var/log\\\":{},\\\"/var/log2\\\":{}\"));\n MatcherAssert.assertThat(\n 
dockerInspectExposedPorts,\n CoreMatchers.containsString(\n \"\\\"1000/tcp\\\":{},\\\"2000/udp\\\":{},\\\"2001/udp\\\":{},\\\"2002/udp\\\":{},\\\"2003/udp\\\":{}\"));\n MatcherAssert.assertThat(\n dockerInspectLabels,\n CoreMatchers.containsString(\"\\\"key1\\\":\\\"value1\\\",\\\"key2\\\":\\\"value2\\\"\"));\n\n", "meta": {"hash_id": "e4c7ab3dbf7c6b83b14f9cc37e44b3713d6b6c86b489dacdfd3f2161960aa01c"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 5, "content": " return new Command(\"docker\", \"run\", \"--rm\", imageReference).run();\n }\n\n @Test\n public void testExecute_simple()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n Assert.assertEquals(\n \"Hello, world. An argument.\\n1970-01-01T00:00:01Z\\nrw-r--r--\\nrw-r--r--\\nfoo\\ncat\\n\"\n + \"1970-01-01T00:00:01Z\\n1970-01-01T00:00:01Z\\n\",\n buildToDockerDaemonAndRun(simpleTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n", "meta": {"hash_id": "68e7f24cb180a2e46dfae279608f5a93e40f6fd3e3a29ed50d15a54cbd0b2a5d"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 6, "content": " @Test\n public void testExecute_simple_extraDirectoriesFiltering()\n throws DigestException, IOException, InterruptedException, VerificationException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-extra-dirs-filtering.xml\");\n String output =\n new Command(\"docker\", \"run\", \"--rm\", \"--entrypoint=ls\", targetImage, \"-1R\", \"/extras\")\n .run();\n\n // /extras/cat.txt\n // /extras/foo\n // /extras/sub/\n // /extras/sub/a.json\n assertThat(output).isEqualTo(\"/extras:\\ncat.txt\\nfoo\\nsub\\n\\n/extras/sub:\\na.json\\n\");\n }\n\n @Test\n public void testExecute_dockerClient()\n throws VerificationException, IOException, InterruptedException {\n Assume.assumeFalse(System.getProperty(\"os.name\").startsWith(\"Windows\"));\n new Command(\n \"chmod\", \"+x\", simpleTestProject.getProjectRoot().resolve(\"mock-docker.sh\").toString())\n .run();\n\n", "meta": {"hash_id": "ede9fc9b519970c378efeeb310ee5a6a6a6229e91b5df28ad6e56a9c2c052397"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 7, "content": " String targetImage = \"simpleimage:maven\" + System.nanoTime();\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-dockerclient.xml\");\n verifier.addCliOption(\"--debug\");\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyTextInLog(\"Docker load called. 
value1 value2\");\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_empty()\n throws InterruptedException, IOException, VerificationException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n\n", "meta": {"hash_id": "63fbf5da149ca4a2716552a9fed5491174b12b01d5a557782a3eacf8bde26066"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 8, "content": " Assert.assertEquals(\"\", buildToDockerDaemonAndRun(emptyTestProject, targetImage));\n Assert.assertEquals(\n \"1970-01-01T00:00:00Z\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Created}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_defaultTarget()\n throws VerificationException, IOException, InterruptedException, DigestException {\n Assert.assertEquals(\n \"Hello, world. An argument.\\n\",\n buildToDockerDaemonAndRun(\n defaultTargetTestProject, \"default-target-name:default-target-version\"));\n }\n\n", "meta": {"hash_id": "6ba47a5b7e142a34e32f1358c9d9719db0a90bae5b1ae90ceea9141677b3f43c"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 9, "content": " @Test\n public void testExecute_jibSkip() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibSkip(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_jibContainerizeSkips() throws VerificationException, IOException {\n SkippedGoalVerifier.verifyJibContainerizeSkips(emptyTestProject, BuildDockerMojo.GOAL_NAME);\n }\n\n @Test\n public void testExecute_userNumeric()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom.xml\");\n Assert.assertEquals(\n \"12345:54321\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n", "meta": {"hash_id": "71cc98df471c9a5486a4acc2f1a256b31872b15885a16861626aef47fba518ce"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 10, "content": " @Test\n public void testExecute_userNames()\n throws VerificationException, IOException, InterruptedException, DigestException {\n String targetImage = \"brokenuserimage:maven\" + System.nanoTime();\n buildToDockerDaemon(emptyTestProject, targetImage, \"pom-broken-user.xml\");\n Assert.assertEquals(\n \"myuser:mygroup\",\n new Command(\"docker\", \"inspect\", \"-f\", \"{{.Config.User}}\", targetImage).run().trim());\n }\n\n @Test\n public void testExecute_noToImageAndInvalidProjectName()\n throws DigestException, VerificationException, IOException, InterruptedException {\n buildToDockerDaemon(simpleTestProject, \"image reference ignored\", \"pom-no-to-image.xml\");\n Assert.assertEquals(\n \"Hello, world. 
\\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", \"my-artifact-id:1\").run());\n }\n\n", "meta": {"hash_id": "13d8ab306a83ed5f27a603e0c8225b4c1c820713b5228448c805cb7c67b8dc65"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 11, "content": " @Test\n public void testExecute_jarContainerization()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"jarcontainerizationimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-jar-containerization.xml\");\n Assert.assertEquals(\n \"Hello, world. \\nImplementation-Title: hello-world\\nImplementation-Version: 1\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testExecute_jarContainerizationOnMissingJar() throws IOException {\n try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"_TARGET_IMAGE\", \"erroronmissingjar\");\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-jar-containerization.xml\");\n verifier.executeGoals(Arrays.asList(\"clean\", \"jib:dockerBuild\"));\n Assert.fail();\n\n", "meta": {"hash_id": "6e2405d2af7f862d899efe10886b85cad36c495a85d4e502096881168992437b"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 12, "content": " } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(),\n CoreMatchers.containsString(\n \"Obtaining project build output files failed; make sure you have packaged your \"\n + \"project before trying to build the image. (Did you accidentally run \\\"mvn \"\n + \"clean jib:build\\\" instead of \\\"mvn clean package jib:build\\\"?)\"));\n }\n }\n\n @Test\n public void testExecute_jibRequireVersion_ok() throws VerificationException, IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n", "meta": {"hash_id": "e196962f9e9cb84084a175832c0ecd7e779d248adf73277e35481d515bb17b92"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 13, "content": " Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n // this plugin should match 1.0\n verifier.setSystemProperty(\"jib.requiredVersion\", \"1.0\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_jibRequireVersion_fail() throws IOException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n\n", "meta": {"hash_id": "e1b839f589c146dc2d1b0c5bc9c5546079bd2153a88e7bc762cca1a1a1edcaa6"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 14, "content": " try {\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.requiredVersion\", \"[,1.0]\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.executeGoals(Arrays.asList(\"package\", \"jib:dockerBuild\"));\n Assert.fail();\n } catch (VerificationException ex) {\n MatcherAssert.assertThat(\n ex.getMessage(), CoreMatchers.containsString(\"but is required to be [,1.0]\"));\n }\n }\n\n", "meta": {"hash_id": "49f622eb6c52e78fb880adeb274fc8ca8c25c4238f65fa0611e946bc1d16e626"}}, {"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 15, 
"content": " @Test\n public void testCredHelperConfigurationSimple()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-1.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n\n @Test\n public void testCredHelperConfigurationComplex()\n throws DigestException, VerificationException, IOException, InterruptedException {\n String targetImage = \"simpleimage:maven\" + System.nanoTime();\n buildToDockerDaemon(simpleTestProject, targetImage, \"pom-cred-helper-2.xml\");\n Assert.assertEquals(\n \"Hello, world. \\n1970-01-01T00:00:01Z\\n\",\n new Command(\"docker\", \"run\", \"--rm\", targetImage).run());\n }\n}\n", "meta": {"hash_id": "5831ec9184845cf9239c2f3d3ab8f7c65605595c1fb4b58903ac77b33d527076"}}]}], "golden_chunks": [{"doc_uuid": "c5cc0b3e2067eb3f0eec0e111678da31cd242215e7c815420964ca9d607e7ddf", "index": 7, "content": " String targetImage = \"simpleimage:maven\" + System.nanoTime();\n Verifier verifier = new Verifier(simpleTestProject.getProjectRoot().toString());\n verifier.setSystemProperty(\"jib.useOnlyProjectCache\", \"true\");\n verifier.setSystemProperty(\"_TARGET_IMAGE\", targetImage);\n verifier.setAutoclean(false);\n verifier.addCliOption(\"--file=pom-dockerclient.xml\");\n verifier.addCliOption(\"--debug\");\n verifier.executeGoal(\"package\");\n\n verifier.executeGoal(\"jib:dockerBuild\");\n verifier.verifyTextInLog(\"Docker load called. value1 value2\");\n verifier.verifyErrorFreeLog();\n }\n\n @Test\n public void testExecute_empty()\n throws InterruptedException, IOException, VerificationException, DigestException {\n String targetImage = \"emptyimage:maven\" + System.nanoTime();\n\n", "meta": {"hash_id": "63fbf5da149ca4a2716552a9fed5491174b12b01d5a557782a3eacf8bde26066"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-maven-plugin/src/integration-test/java/com/google/cloud/tools/jib/maven/BuildDockerMojoIntegrationTest.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 16, "source": "codebases/jsonl"}} +{"query": "What exception is thrown when registry authentication fails?", "answer": "When registry authentication fails, a `RegistryAuthenticationFailedException` is thrown. This is evident from the class declaration:\n```java\npublic class RegistryAuthenticationFailedException extends RegistryException {\n // ...\n}\n```", "golden_doc_uuids": ["eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09"], "golden_chunk_uuids": [["eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", 1]], "golden_documents": [{"uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", "content": "/*\n * Copyright 2017 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.api;\n\nimport java.text.MessageFormat;\n\n/** Thrown because registry authentication failed. */\npublic class RegistryAuthenticationFailedException extends RegistryException {\n\n private static final String REASON = \"Failed to authenticate with registry {0}/{1} because: {2}\";\n private final String serverUrl;\n private final String imageName;\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param cause the underlying cause that triggered this exception\n */\n public RegistryAuthenticationFailedException(\n String serverUrl, String imageName, Throwable cause) {\n super(MessageFormat.format(REASON, serverUrl, imageName, cause.getMessage()), cause);\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param reason the underlying reason that triggered this exception\n */\n public RegistryAuthenticationFailedException(String serverUrl, String imageName, String reason) {\n super(MessageFormat.format(REASON, serverUrl, imageName, reason));\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n /**\n * The server being authenticated.\n *\n * @return the server being authenticated\n */\n public String getServerUrl() {\n return serverUrl;\n }\n\n /**\n * The image being authenticated.\n *\n * @return the image being authenticated\n */\n public String getImageName() {\n return imageName;\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/main/java/com/google/cloud/tools/jib/api/RegistryAuthenticationFailedException.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", "index": 0, "content": "/*\n * Copyright 2017 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "90de5bada3b5cee745e550695572df31695fb1d28856623e9c0a83f77ab0ea5a"}}, {"doc_uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", "index": 1, "content": "package com.google.cloud.tools.jib.api;\n\nimport java.text.MessageFormat;\n\n/** Thrown because registry authentication failed. 
*/\npublic class RegistryAuthenticationFailedException extends RegistryException {\n\n private static final String REASON = \"Failed to authenticate with registry {0}/{1} because: {2}\";\n private final String serverUrl;\n private final String imageName;\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param cause the underlying cause that triggered this exception\n */\n public RegistryAuthenticationFailedException(\n String serverUrl, String imageName, Throwable cause) {\n super(MessageFormat.format(REASON, serverUrl, imageName, cause.getMessage()), cause);\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n", "meta": {"hash_id": "37be69927eff92a2d82a933b2dbdc3f87226f49882954e3c12aa5ef08c2d5f75"}}, {"doc_uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", "index": 2, "content": " /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param reason the underlying reason that triggered this exception\n */\n public RegistryAuthenticationFailedException(String serverUrl, String imageName, String reason) {\n super(MessageFormat.format(REASON, serverUrl, imageName, reason));\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n /**\n * The server being authenticated.\n *\n * @return the server being authenticated\n */\n public String getServerUrl() {\n return serverUrl;\n }\n\n /**\n * The image being authenticated.\n *\n * @return the image being authenticated\n */\n public String getImageName() {\n return imageName;\n }\n}\n", "meta": {"hash_id": "fbb7801e52b7e9ff6e5a9d2607205dec4a9c05428149a7a75b7d4dc08c3955da"}}]}], "golden_chunks": [{"doc_uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", "index": 1, "content": "package com.google.cloud.tools.jib.api;\n\nimport java.text.MessageFormat;\n\n/** Thrown because registry authentication failed. */\npublic class RegistryAuthenticationFailedException extends RegistryException {\n\n private static final String REASON = \"Failed to authenticate with registry {0}/{1} because: {2}\";\n private final String serverUrl;\n private final String imageName;\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param cause the underlying cause that triggered this exception\n */\n public RegistryAuthenticationFailedException(\n String serverUrl, String imageName, Throwable cause) {\n super(MessageFormat.format(REASON, serverUrl, imageName, cause.getMessage()), cause);\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n", "meta": {"hash_id": "37be69927eff92a2d82a933b2dbdc3f87226f49882954e3c12aa5ef08c2d5f75"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/main/java/com/google/cloud/tools/jib/api/RegistryAuthenticationFailedException.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What information is included in the exception message when a `RegistryAuthenticationFailedException` is thrown?", "answer": "The exception message includes the registry server URL, the image name that requires authentication, and the reason for the authentication failure. 
This can be seen in the `REASON` constant and the constructor implementations:\n```java\nprivate static final String REASON = \"Failed to authenticate with registry {0}/{1} because: {2}\";\n\npublic RegistryAuthenticationFailedException(\n String serverUrl, String imageName, Throwable cause) {\n super(MessageFormat.format(REASON, serverUrl, imageName, cause.getMessage()), cause);\n // ...\n}\n\npublic RegistryAuthenticationFailedException(String serverUrl, String imageName, String reason) {\n super(MessageFormat.format(REASON, serverUrl, imageName, reason));\n // ...\n}\n```", "golden_doc_uuids": ["eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09"], "golden_chunk_uuids": [["eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", 1]], "golden_documents": [{"uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", "content": "/*\n * Copyright 2017 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\npackage com.google.cloud.tools.jib.api;\n\nimport java.text.MessageFormat;\n\n/** Thrown because registry authentication failed. */\npublic class RegistryAuthenticationFailedException extends RegistryException {\n\n private static final String REASON = \"Failed to authenticate with registry {0}/{1} because: {2}\";\n private final String serverUrl;\n private final String imageName;\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param cause the underlying cause that triggered this exception\n */\n public RegistryAuthenticationFailedException(\n String serverUrl, String imageName, Throwable cause) {\n super(MessageFormat.format(REASON, serverUrl, imageName, cause.getMessage()), cause);\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param reason the underlying reason that triggered this exception\n */\n public RegistryAuthenticationFailedException(String serverUrl, String imageName, String reason) {\n super(MessageFormat.format(REASON, serverUrl, imageName, reason));\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n /**\n * The server being authenticated.\n *\n * @return the server being authenticated\n */\n public String getServerUrl() {\n return serverUrl;\n }\n\n /**\n * The image being authenticated.\n *\n * @return the image being authenticated\n */\n public String getImageName() {\n return imageName;\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/main/java/com/google/cloud/tools/jib/api/RegistryAuthenticationFailedException.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", "index": 0, "content": "/*\n * 
Copyright 2017 Google LLC.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n * use this file except in compliance with the License. You may obtain a copy of\n * the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n * License for the specific language governing permissions and limitations under\n * the License.\n */\n\n", "meta": {"hash_id": "90de5bada3b5cee745e550695572df31695fb1d28856623e9c0a83f77ab0ea5a"}}, {"doc_uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", "index": 1, "content": "package com.google.cloud.tools.jib.api;\n\nimport java.text.MessageFormat;\n\n/** Thrown because registry authentication failed. */\npublic class RegistryAuthenticationFailedException extends RegistryException {\n\n private static final String REASON = \"Failed to authenticate with registry {0}/{1} because: {2}\";\n private final String serverUrl;\n private final String imageName;\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param cause the underlying cause that triggered this exception\n */\n public RegistryAuthenticationFailedException(\n String serverUrl, String imageName, Throwable cause) {\n super(MessageFormat.format(REASON, serverUrl, imageName, cause.getMessage()), cause);\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n", "meta": {"hash_id": "37be69927eff92a2d82a933b2dbdc3f87226f49882954e3c12aa5ef08c2d5f75"}}, {"doc_uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", "index": 2, "content": " /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param reason the underlying reason that triggered this exception\n */\n public RegistryAuthenticationFailedException(String serverUrl, String imageName, String reason) {\n super(MessageFormat.format(REASON, serverUrl, imageName, reason));\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n /**\n * The server being authenticated.\n *\n * @return the server being authenticated\n */\n public String getServerUrl() {\n return serverUrl;\n }\n\n /**\n * The image being authenticated.\n *\n * @return the image being authenticated\n */\n public String getImageName() {\n return imageName;\n }\n}\n", "meta": {"hash_id": "fbb7801e52b7e9ff6e5a9d2607205dec4a9c05428149a7a75b7d4dc08c3955da"}}]}], "golden_chunks": [{"doc_uuid": "eabb6a15874b3744292a5808f64e7a6ddf5b7114f80bad3306c2af61d8799b09", "index": 1, "content": "package com.google.cloud.tools.jib.api;\n\nimport java.text.MessageFormat;\n\n/** Thrown because registry authentication failed. 
*/\npublic class RegistryAuthenticationFailedException extends RegistryException {\n\n private static final String REASON = \"Failed to authenticate with registry {0}/{1} because: {2}\";\n private final String serverUrl;\n private final String imageName;\n\n /**\n * Creates a new exception with a human readable message.\n *\n * @param serverUrl the registry server url\n * @param imageName the image name that requires authentication\n * @param cause the underlying cause that triggered this exception\n */\n public RegistryAuthenticationFailedException(\n String serverUrl, String imageName, Throwable cause) {\n super(MessageFormat.format(REASON, serverUrl, imageName, cause.getMessage()), cause);\n this.serverUrl = serverUrl;\n this.imageName = imageName;\n }\n\n", "meta": {"hash_id": "37be69927eff92a2d82a933b2dbdc3f87226f49882954e3c12aa5ef08c2d5f75"}}], "meta": {"doctype": "codebase", "relative_path": "/jib-core/src/main/java/com/google/cloud/tools/jib/api/RegistryAuthenticationFailedException.java", "repo_name": "GoogleContainerTools/jib", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What is the default length of a generated pepper when no length is specified?", "answer": "As per the testSaltNoLength() test method, when generating a pepper without specifying a length, the default length is expected to be 24. This is verified by the assertion:\n\nAssert.assertEquals(24, pepper.length());\n", "golden_doc_uuids": ["a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b"], "golden_chunk_uuids": [["a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", 0]], "golden_documents": [{"uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "content": "package com.password4j;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\npublic class PepperGeneratorTest\n{\n\n @Test\n public void testSaltLength()\n {\n // GIVEN\n int length = 23;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n\n @Test\n public void testSaltNoLength()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.generate();\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(24, pepper.length());\n }\n\n @Test(expected = BadParametersException.class)\n public void testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/PepperGeneratorTest.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "index": 0, "content": "package com.password4j;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\npublic class PepperGeneratorTest\n{\n\n @Test\n public void testSaltLength()\n {\n // GIVEN\n int length = 23;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n\n @Test\n public void 
testSaltNoLength()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.generate();\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(24, pepper.length());\n }\n\n", "meta": {"hash_id": "2791f9cb9cef36aebfdae5c23853e6dea4357d4a96f23ace8ce1512711b1ff4a"}}, {"doc_uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "index": 1, "content": " @Test(expected = BadParametersException.class)\n public void testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n", "meta": {"hash_id": "e6135c81d5cdd028e1286c8ee4a7c090964f41d3e4603bfdaf92e76ab96dc007"}}]}], "golden_chunks": [{"doc_uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "index": 0, "content": "package com.password4j;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\npublic class PepperGeneratorTest\n{\n\n @Test\n public void testSaltLength()\n {\n // GIVEN\n int length = 23;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n\n @Test\n public void testSaltNoLength()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.generate();\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(24, pepper.length());\n }\n\n", "meta": {"hash_id": "2791f9cb9cef36aebfdae5c23853e6dea4357d4a96f23ace8ce1512711b1ff4a"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/PepperGeneratorTest.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "Is it possible to generate a pepper with a length of zero?", "answer": "Yes, based on the testSaltZeroLength() test method, it is possible to generate a pepper with a length of zero. 
The test verifies that the generated pepper is not null and has a length of zero, as asserted by:\n\nAssert.assertNotNull(pepper);\nAssert.assertEquals(length, pepper.length());\n", "golden_doc_uuids": ["a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b"], "golden_chunk_uuids": [["a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", 1]], "golden_documents": [{"uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "content": "package com.password4j;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\npublic class PepperGeneratorTest\n{\n\n @Test\n public void testSaltLength()\n {\n // GIVEN\n int length = 23;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n\n @Test\n public void testSaltNoLength()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.generate();\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(24, pepper.length());\n }\n\n @Test(expected = BadParametersException.class)\n public void testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/PepperGeneratorTest.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "index": 0, "content": "package com.password4j;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\npublic class PepperGeneratorTest\n{\n\n @Test\n public void testSaltLength()\n {\n // GIVEN\n int length = 23;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n\n @Test\n public void testSaltNoLength()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.generate();\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(24, pepper.length());\n }\n\n", "meta": {"hash_id": "2791f9cb9cef36aebfdae5c23853e6dea4357d4a96f23ace8ce1512711b1ff4a"}}, {"doc_uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "index": 1, "content": " @Test(expected = BadParametersException.class)\n public void testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n", "meta": {"hash_id": "e6135c81d5cdd028e1286c8ee4a7c090964f41d3e4603bfdaf92e76ab96dc007"}}]}], "golden_chunks": [{"doc_uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "index": 1, "content": " @Test(expected = BadParametersException.class)\n public void 
testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n", "meta": {"hash_id": "e6135c81d5cdd028e1286c8ee4a7c090964f41d3e4603bfdaf92e76ab96dc007"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/PepperGeneratorTest.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the expected value of the pepper returned by the PepperGenerator.get() method?", "answer": "The testAlice() test method indicates that when calling the PepperGenerator.get() method, it is expected to return the value \"AlicePepper\". This is verified by the assertion:\n\nAssert.assertEquals(\"AlicePepper\", pepper);\n", "golden_doc_uuids": ["a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b"], "golden_chunk_uuids": [["a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", 1]], "golden_documents": [{"uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "content": "package com.password4j;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\npublic class PepperGeneratorTest\n{\n\n @Test\n public void testSaltLength()\n {\n // GIVEN\n int length = 23;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n\n @Test\n public void testSaltNoLength()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.generate();\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(24, pepper.length());\n }\n\n @Test(expected = BadParametersException.class)\n public void testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/PepperGeneratorTest.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "index": 0, "content": "package com.password4j;\n\nimport org.junit.Assert;\nimport org.junit.Test;\n\n\npublic class PepperGeneratorTest\n{\n\n @Test\n public void testSaltLength()\n {\n // GIVEN\n int length = 23;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n\n @Test\n public void testSaltNoLength()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.generate();\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(24, pepper.length());\n }\n\n", "meta": {"hash_id": "2791f9cb9cef36aebfdae5c23853e6dea4357d4a96f23ace8ce1512711b1ff4a"}}, {"doc_uuid": 
"a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "index": 1, "content": " @Test(expected = BadParametersException.class)\n public void testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n", "meta": {"hash_id": "e6135c81d5cdd028e1286c8ee4a7c090964f41d3e4603bfdaf92e76ab96dc007"}}]}], "golden_chunks": [{"doc_uuid": "a7207d085190f179fc2a0f4ed97aece6f033ecdda1104e75dac8fe43844b598b", "index": 1, "content": " @Test(expected = BadParametersException.class)\n public void testSaltNegativeLength()\n {\n // GIVEN\n\n // WHEN\n PepperGenerator.generate(-3);\n\n // THEN\n\n }\n\n @Test\n public void testSaltZeroLength()\n {\n // GIVEN\n int length = 0;\n\n // WHEN\n String pepper = PepperGenerator.generate(length);\n\n // THEN\n Assert.assertNotNull(pepper);\n Assert.assertEquals(length, pepper.length());\n }\n\n @Test\n public void testAlice()\n {\n // GIVEN\n\n // WHEN\n String pepper = PepperGenerator.get();\n\n // THEN\n Assert.assertEquals(\"AlicePepper\", pepper);\n }\n}\n", "meta": {"hash_id": "e6135c81d5cdd028e1286c8ee4a7c090964f41d3e4603bfdaf92e76ab96dc007"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/PepperGeneratorTest.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the slowEquals method that takes two CharSequence objects?", "answer": "The slowEquals method that takes two CharSequence objects is used to compare them as byte arrays in length-constant time. 
As stated in the code comments, \"This comparison method is used so that password hashes cannot be extracted from an on-line system using a timing attack and then attacked off-line.\" It converts the CharSequences to byte arrays using Utils.fromCharSequenceToBytes(a) and Utils.fromCharSequenceToBytes(b), and then calls the slowEquals method that compares byte arrays.", "golden_doc_uuids": ["2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9"], "golden_chunk_uuids": [["2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", 1]], "golden_documents": [{"uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\n/**\n * Class in the hierarchy to avoid code duplication.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic abstract class AbstractHashingFunction implements HashingFunction\n{\n\n /**\n * Compares two {@link CharSequence}s as byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first CharSequence\n * @param b the second CharSequence\n * @return true if both {@link CharSequence}s are the same, false if not\n */\n protected static boolean slowEquals(CharSequence a, CharSequence b)\n {\n return slowEquals(Utils.fromCharSequenceToBytes(a), Utils.fromCharSequenceToBytes(b));\n }\n\n /**\n * Compares two byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first byte array\n * @param b the second byte array\n * @return true if both byte arrays are the same, false if not\n */\n protected static boolean slowEquals(byte[] a, byte[] b)\n {\n int diff = a.length ^ b.length;\n for (int i = 0; i < a.length && i < b.length; i++)\n {\n diff |= a[i] ^ b[i];\n }\n return diff == 0;\n }\n\n @Override\n public Hash hash(CharSequence plainTextPassword, String salt, CharSequence pepper)\n {\n CharSequence peppered = Utils.append(pepper, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n @Override\n public Hash hash(byte[] plainTextPassword, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n byte[] peppered = Utils.append(pepperAsBytes, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 0.1.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password as bytes array\n * @param hashed the hash as bytes array\n * @param salt the salt as bytes array used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.5.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt, CharSequence pepper)\n {\n return check(Utils.append(pepper, plainTextPassword), hashed, salt);\n }\n\n /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n return check(Utils.append(pepperAsBytes, plainTextPassword), hashed, salt);\n }\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/AbstractHashingFunction.java", "repo_name": "Password4j/password4j", "num_chunks": 8, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\n", "meta": {"hash_id": "8daaf7794bb9e896640aa7f55c28eeb0290d85b105dab2b55b12249b92ac291f"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 1, "content": "package com.password4j;\n\n/**\n * Class in the hierarchy to avoid code duplication.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic abstract class AbstractHashingFunction implements HashingFunction\n{\n\n /**\n * Compares two {@link CharSequence}s as byte arrays in length-constant time. 
This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first CharSequence\n * @param b the second CharSequence\n * @return true if both {@link CharSequence}s are the same, false if not\n */\n protected static boolean slowEquals(CharSequence a, CharSequence b)\n {\n return slowEquals(Utils.fromCharSequenceToBytes(a), Utils.fromCharSequenceToBytes(b));\n }\n\n", "meta": {"hash_id": "aff73d3eec5d63bf85b6222c4bf54b721087c8a1a7b9f162a238934972869c49"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 2, "content": " /**\n * Compares two byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first byte array\n * @param b the second byte array\n * @return true if both byte arrays are the same, false if not\n */\n protected static boolean slowEquals(byte[] a, byte[] b)\n {\n int diff = a.length ^ b.length;\n for (int i = 0; i < a.length && i < b.length; i++)\n {\n diff |= a[i] ^ b[i];\n }\n return diff == 0;\n }\n\n", "meta": {"hash_id": "f3cfcc580e5cf90f207f86ad564d9e8a36439a643e713beb21791f41b16d9294"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 3, "content": " @Override\n public Hash hash(CharSequence plainTextPassword, String salt, CharSequence pepper)\n {\n CharSequence peppered = Utils.append(pepper, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n @Override\n public Hash hash(byte[] plainTextPassword, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n byte[] peppered = Utils.append(pepperAsBytes, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n", "meta": {"hash_id": "e5a589250e080b4411b0b225c741130378c8c0f18ea7fbe21ef637c44ab25321"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 4, "content": " /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 0.1.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n", "meta": {"hash_id": "b17419fe163ac9d9534aea19ff30ba9a32ba8d9a9e7a357bbca4a670acb16d30"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 5, "content": " /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password as bytes array\n * @param hashed the hash as bytes array\n * @param salt the salt as bytes array used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n", "meta": {"hash_id": "07e1a55de58d64b7ab1ee5d5988ef90b10076ebb02419eab4d63d83cfcc2e622"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 6, "content": " /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.5.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt, CharSequence pepper)\n {\n return check(Utils.append(pepper, plainTextPassword), hashed, salt);\n }\n\n", "meta": {"hash_id": "2e7de0eef1b4e908c216a5de0190816f9c0b0b9cf9d7757527052a3d9134765b"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 7, "content": " /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n return check(Utils.append(pepperAsBytes, plainTextPassword), hashed, salt);\n }\n\n}\n", "meta": {"hash_id": "e46331a02662ad75870e0daf3f2785657bab3bd8d36a7fcd08452040f07612b9"}}]}], "golden_chunks": [{"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 1, "content": "package com.password4j;\n\n/**\n * Class in the hierarchy to avoid code duplication.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic abstract class AbstractHashingFunction implements HashingFunction\n{\n\n /**\n * Compares two {@link CharSequence}s as byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first CharSequence\n * @param b the second CharSequence\n * @return true if both {@link CharSequence}s are the same, false if not\n */\n protected static boolean slowEquals(CharSequence a, CharSequence b)\n {\n return slowEquals(Utils.fromCharSequenceToBytes(a), Utils.fromCharSequenceToBytes(b));\n }\n\n", "meta": {"hash_id": "aff73d3eec5d63bf85b6222c4bf54b721087c8a1a7b9f162a238934972869c49"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/AbstractHashingFunction.java", "repo_name": "Password4j/password4j", "num_chunks": 8, "source": "codebases/jsonl"}} +{"query": "How does the hash method handle the presence or absence of a salt value?", "answer": "The hash method checks if the salt parameter is null. 
If it is null, it calls the hash(peppered) method, passing only the peppered password (plaintext password appended with pepper). If salt is not null, it calls the hash(peppered, salt) method, passing both the peppered password and the salt. This allows the hash method to work with or without a salt value.", "golden_doc_uuids": ["2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9"], "golden_chunk_uuids": [["2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", 3]], "golden_documents": [{"uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\n/**\n * Class in the hierarchy to avoid code duplication.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic abstract class AbstractHashingFunction implements HashingFunction\n{\n\n /**\n * Compares two {@link CharSequence}s as byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first CharSequence\n * @param b the second CharSequence\n * @return true if both {@link CharSequence}s are the same, false if not\n */\n protected static boolean slowEquals(CharSequence a, CharSequence b)\n {\n return slowEquals(Utils.fromCharSequenceToBytes(a), Utils.fromCharSequenceToBytes(b));\n }\n\n /**\n * Compares two byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first byte array\n * @param b the second byte array\n * @return true if both byte arrays are the same, false if not\n */\n protected static boolean slowEquals(byte[] a, byte[] b)\n {\n int diff = a.length ^ b.length;\n for (int i = 0; i < a.length && i < b.length; i++)\n {\n diff |= a[i] ^ b[i];\n }\n return diff == 0;\n }\n\n @Override\n public Hash hash(CharSequence plainTextPassword, String salt, CharSequence pepper)\n {\n CharSequence peppered = Utils.append(pepper, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n @Override\n public Hash hash(byte[] plainTextPassword, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n byte[] peppered = Utils.append(pepperAsBytes, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 0.1.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password as bytes array\n * @param hashed the hash as bytes array\n * @param salt the salt as bytes array used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.5.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt, CharSequence pepper)\n {\n return check(Utils.append(pepper, plainTextPassword), hashed, salt);\n }\n\n /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n return check(Utils.append(pepperAsBytes, plainTextPassword), hashed, salt);\n }\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/AbstractHashingFunction.java", "repo_name": "Password4j/password4j", "num_chunks": 8, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\n", "meta": {"hash_id": "8daaf7794bb9e896640aa7f55c28eeb0290d85b105dab2b55b12249b92ac291f"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 1, "content": "package com.password4j;\n\n/**\n * Class in the hierarchy to avoid code duplication.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic abstract class AbstractHashingFunction implements HashingFunction\n{\n\n /**\n * Compares two {@link CharSequence}s as byte arrays in length-constant time. 
This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first CharSequence\n * @param b the second CharSequence\n * @return true if both {@link CharSequence}s are the same, false if not\n */\n protected static boolean slowEquals(CharSequence a, CharSequence b)\n {\n return slowEquals(Utils.fromCharSequenceToBytes(a), Utils.fromCharSequenceToBytes(b));\n }\n\n", "meta": {"hash_id": "aff73d3eec5d63bf85b6222c4bf54b721087c8a1a7b9f162a238934972869c49"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 2, "content": " /**\n * Compares two byte arrays in length-constant time. This comparison method\n * is used so that password hashes cannot be extracted from an on-line\n * system using a timing attack and then attacked off-line.\n *\n * @param a the first byte array\n * @param b the second byte array\n * @return true if both byte arrays are the same, false if not\n */\n protected static boolean slowEquals(byte[] a, byte[] b)\n {\n int diff = a.length ^ b.length;\n for (int i = 0; i < a.length && i < b.length; i++)\n {\n diff |= a[i] ^ b[i];\n }\n return diff == 0;\n }\n\n", "meta": {"hash_id": "f3cfcc580e5cf90f207f86ad564d9e8a36439a643e713beb21791f41b16d9294"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 3, "content": " @Override\n public Hash hash(CharSequence plainTextPassword, String salt, CharSequence pepper)\n {\n CharSequence peppered = Utils.append(pepper, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n @Override\n public Hash hash(byte[] plainTextPassword, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n byte[] peppered = Utils.append(pepperAsBytes, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n", "meta": {"hash_id": "e5a589250e080b4411b0b225c741130378c8c0f18ea7fbe21ef637c44ab25321"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 4, "content": " /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 0.1.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n", "meta": {"hash_id": "b17419fe163ac9d9534aea19ff30ba9a32ba8d9a9e7a357bbca4a670acb16d30"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 5, "content": " /**\n * Just calls {@link #check(CharSequence, String)} without salt\n * parameter.\n *
<p>
\n * Do not override this if the algorithm doesn't need a manually\n * provided salt.\n *\n * @param plainTextPassword the plaintext password as bytes array\n * @param hashed the hash as bytes array\n * @param salt the salt as bytes array used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt)\n {\n return check(plainTextPassword, hashed);\n }\n\n", "meta": {"hash_id": "07e1a55de58d64b7ab1ee5d5988ef90b10076ebb02419eab4d63d83cfcc2e622"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 6, "content": " /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.5.0\n */\n @Override\n public boolean check(CharSequence plainTextPassword, String hashed, String salt, CharSequence pepper)\n {\n return check(Utils.append(pepper, plainTextPassword), hashed, salt);\n }\n\n", "meta": {"hash_id": "2e7de0eef1b4e908c216a5de0190816f9c0b0b9cf9d7757527052a3d9134765b"}}, {"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 7, "content": " /**\n * Just calls {@link #check(CharSequence, String, String)}, with a prepended pepper.\n *\n * @param plainTextPassword the plaintext password\n * @param hashed the hash\n * @param salt the salt used to produce the hash\n * @return true if the hash is generated from the plaintext; false otherwise\n * @since 1.7.0\n */\n @Override\n public boolean check(byte[] plainTextPassword, byte[] hashed, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n return check(Utils.append(pepperAsBytes, plainTextPassword), hashed, salt);\n }\n\n}\n", "meta": {"hash_id": "e46331a02662ad75870e0daf3f2785657bab3bd8d36a7fcd08452040f07612b9"}}]}], "golden_chunks": [{"doc_uuid": "2c42fa36aa434372ba169a9f7cbfe3a0a0b140342bb70fe07907b93ac23f23e9", "index": 3, "content": " @Override\n public Hash hash(CharSequence plainTextPassword, String salt, CharSequence pepper)\n {\n CharSequence peppered = Utils.append(pepper, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n @Override\n public Hash hash(byte[] plainTextPassword, byte[] salt, CharSequence pepper)\n {\n byte[] pepperAsBytes = Utils.fromCharSequenceToBytes(pepper);\n byte[] peppered = Utils.append(pepperAsBytes, plainTextPassword);\n Hash result;\n if (salt == null)\n {\n result = hash(peppered);\n }\n else\n {\n result = hash(peppered, salt);\n }\n\n result.setPepper(pepper);\n return result;\n }\n\n", "meta": {"hash_id": "e5a589250e080b4411b0b225c741130378c8c0f18ea7fbe21ef637c44ab25321"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/AbstractHashingFunction.java", "repo_name": "Password4j/password4j", "num_chunks": 8, "source": "codebases/jsonl"}} +{"query": "What are the input parameters for the BalloonHashingFunction constructor?", "answer": "The BalloonHashingFunction constructor takes the following parameters:\n- hashAlgorithm: A String representing the hash algorithm to use (e.g., \"SHA-256\").\n- space: An integer representing 
the space cost.\n- time: An integer representing the time cost.\n- parallelism: An integer representing the parallelism factor.\n- delta: An integer representing the delta value.\nThis can be seen in the constructor definition: `public BalloonHashingFunction(String hashAlgorithm, int space, int time, int parallelism, int delta)`.", "golden_doc_uuids": ["b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad"], "golden_chunk_uuids": [["b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", 9], ["b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", 3], ["b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", 1], ["b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", 0]], "golden_documents": [{"uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "content": "/*\n * (C) Copyright 2023 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\nimport com.password4j.types.Argon2;\nimport com.password4j.types.Bcrypt;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\n\npublic class BalloonHashingFunctionTest\n{\n\n private static final Object[][] TEST_VECTORS = new Object[][]{\n // Single thread\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 0, 3, \"716043dff777b44aa7b88dcbab12c078abecfac9d289c5b5195967aa63440dfb\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 0, 3, \"5f02f8206f9cd212485c6bdf85527b698956701ad0852106f94b94ee94577378\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 0, 3, \"20aa99d7fe3f4df4bd98c655c5480ec98b143107a331fd491deda885c4d6a6cc\"},\n new Object[]{\"\\0\", \"\\0\", \"SHA-256\", 3, 3, 0, 3, \"4fc7e302ffa29ae0eac31166cee7a552d1d71135f4e0da66486fb68a749b73a4\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 0, 3, \"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n // Multiple threads\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 4, 3, \"1832bd8e5cbeba1cb174a13838095e7e66508e9bf04c40178990adbc8ba9eb6f\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 2, 3, \"f8767fe04059cef67b4427cda99bf8bcdd983959dbd399a5e63ea04523716c23\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 3, 3, \"bcad257eff3d1090b50276514857e60db5d0ec484129013ef3c88f7d36e438d6\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 1, 3, \"498344ee9d31baf82cc93ebb3874fe0b76e164302c1cefa1b63a90a69afb9b4d\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 4, 3, \"8a665611e40710ba1fd78c181549c750f17c12e423c11930ce997f04c7153e0c\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 1, 3, \"d9e33c683451b21fb3720afbd78bf12518c1d4401fa39f054b052a145c968bb1\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 16, 3, \"a67b383bb88a282aef595d98697f90820adf64582a4b3627c76b7da3d8bae915\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 1, 3, 
\"97a11df9382a788c781929831d409d3599e0b67ab452ef834718114efdcd1c6d\"},\n\n };\n\n\n @Test\n public void test()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = new BalloonHashingFunction((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n }\n\n }\n\n @Test\n public void testInstance()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = BalloonHashingFunction.getInstance((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash(((String) testVector[0]).getBytes(), ((String) testVector[1]).getBytes()).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n Assert.assertTrue(balloonHashingFunction.check(((String) testVector[0]).getBytes(), ((String) testVector[7]).getBytes(), ((String) testVector[1]).getBytes()));\n }\n\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String m = \"SHA-256\";\n int i = 2;\n int p = 3;\n int l = 4;\n int v = 5;\n BalloonHashingFunction balloonHashingFunction = BalloonHashingFunction.getInstance(m, i, p, l, v);\n\n // THEN\n boolean eqNull = balloonHashingFunction.equals(null);\n boolean eqClass = balloonHashingFunction.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = balloonHashingFunction.equals(BalloonHashingFunction.getInstance(m, i, p, l, v));\n boolean sameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v));\n String toString = balloonHashingFunction.toString();\n int hashCode = balloonHashingFunction.hashCode();\n boolean notSameInst1 = balloonHashingFunction.equals(new BalloonHashingFunction(\"SHA-512\", i, p, l, v));\n boolean notSameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i+1, p, l, v));\n boolean notSameInst3 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p+1, l, v));\n boolean notSameInst4 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l+1, v));\n boolean notSameInst6 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v+1));\n\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new BalloonHashingFunction(m, i+1, p, l, v).toString());\n Assert.assertNotEquals(hashCode, new BalloonHashingFunction(m, i, p, l, v+1).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n Assert.assertFalse(notSameInst3);\n Assert.assertFalse(notSameInst4);\n Assert.assertFalse(notSameInst6);\n }\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/BalloonHashingFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": 
"b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 0, "content": "/*\n * (C) Copyright 2023 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\nimport com.password4j.types.Argon2;\nimport com.password4j.types.Bcrypt;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\n\npublic class BalloonHashingFunctionTest\n{\n\n", "meta": {"hash_id": "af40848c1dc0efcd0a2d4156691f1bd2842ccc2c86aab37fc9c4556c3f3fa76f"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 1, "content": " private static final Object[][] TEST_VECTORS = new Object[][]{\n // Single thread\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 0, 3, \"716043dff777b44aa7b88dcbab12c078abecfac9d289c5b5195967aa63440dfb\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 0, 3, \"5f02f8206f9cd212485c6bdf85527b698956701ad0852106f94b94ee94577378\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 0, 3, \"20aa99d7fe3f4df4bd98c655c5480ec98b143107a331fd491deda885c4d6a6cc\"},\n new Object[]{\"\\0\", \"\\0\", \"SHA-256\", 3, 3, 0, 3, \"4fc7e302ffa29ae0eac31166cee7a552d1d71135f4e0da66486fb68a749b73a4\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 0, 3, \"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n", "meta": {"hash_id": "85c2d6457ddf473de3a5f52644e9db9c85d302be436533997c18dcd63ba1bccd"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 2, "content": " // Multiple threads\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 4, 3, \"1832bd8e5cbeba1cb174a13838095e7e66508e9bf04c40178990adbc8ba9eb6f\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 2, 3, \"f8767fe04059cef67b4427cda99bf8bcdd983959dbd399a5e63ea04523716c23\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 3, 3, \"bcad257eff3d1090b50276514857e60db5d0ec484129013ef3c88f7d36e438d6\"},\n", "meta": {"hash_id": "4003b157f9c69a665fdc78804da2d3494d3ed19d6cc58455c7d18a3c5ade0fd2"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 3, "content": " new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 1, 3, \"498344ee9d31baf82cc93ebb3874fe0b76e164302c1cefa1b63a90a69afb9b4d\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 4, 3, \"8a665611e40710ba1fd78c181549c750f17c12e423c11930ce997f04c7153e0c\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 1, 3, \"d9e33c683451b21fb3720afbd78bf12518c1d4401fa39f054b052a145c968bb1\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 16, 3, \"a67b383bb88a282aef595d98697f90820adf64582a4b3627c76b7da3d8bae915\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 1, 3, \"97a11df9382a788c781929831d409d3599e0b67ab452ef834718114efdcd1c6d\"},\n\n", "meta": {"hash_id": "8d66684082925a6677986182822b56a560f07eda563d5c2f2a3724e24ff60b92"}}, {"doc_uuid": 
"b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 4, "content": " };\n\n\n @Test\n public void test()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = new BalloonHashingFunction((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n }\n\n }\n\n @Test\n public void testInstance()\n {\n\n", "meta": {"hash_id": "44a2c27161419c688119eefac065aa485433352593d29e7bd7d82b5edb721c1a"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 5, "content": " BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = BalloonHashingFunction.getInstance((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash(((String) testVector[0]).getBytes(), ((String) testVector[1]).getBytes()).getResult());\n\n", "meta": {"hash_id": "61fa583d59b4a25230d4ef9d043fe334af4c97cde1428b395da9d201cd052a55"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 6, "content": " Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n Assert.assertTrue(balloonHashingFunction.check(((String) testVector[0]).getBytes(), ((String) testVector[7]).getBytes(), ((String) testVector[1]).getBytes()));\n }\n\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String m = \"SHA-256\";\n int i = 2;\n int p = 3;\n int l = 4;\n int v = 5;\n BalloonHashingFunction balloonHashingFunction = BalloonHashingFunction.getInstance(m, i, p, l, v);\n\n", "meta": {"hash_id": "b4140a9823115713bfcf8ea42baeed364a752e743362d8ed619e783eb5671401"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 7, "content": " // THEN\n boolean eqNull = balloonHashingFunction.equals(null);\n boolean eqClass = balloonHashingFunction.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = balloonHashingFunction.equals(BalloonHashingFunction.getInstance(m, i, p, l, v));\n boolean sameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v));\n String toString = balloonHashingFunction.toString();\n int hashCode = balloonHashingFunction.hashCode();\n boolean notSameInst1 = balloonHashingFunction.equals(new BalloonHashingFunction(\"SHA-512\", i, p, l, v));\n", "meta": {"hash_id": "ee04aecd18be25012e4d467b7c0311c833ada51047799dca5fb1896184a48b3e"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 8, "content": " boolean notSameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i+1, p, l, v));\n boolean notSameInst3 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p+1, l, v));\n boolean notSameInst4 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l+1, v));\n boolean notSameInst6 = balloonHashingFunction.equals(new 
BalloonHashingFunction(m, i, p, l, v+1));\n\n", "meta": {"hash_id": "4a3f4afae4a30fc87ee97c3e45639bf85df026912bb3106dcaa3b788aae04179"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 9, "content": " // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new BalloonHashingFunction(m, i+1, p, l, v).toString());\n Assert.assertNotEquals(hashCode, new BalloonHashingFunction(m, i, p, l, v+1).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n Assert.assertFalse(notSameInst3);\n Assert.assertFalse(notSameInst4);\n Assert.assertFalse(notSameInst6);\n }\n\n}\n", "meta": {"hash_id": "e28126555538a2d83d9408547d8812c647f16fa1136a2d2c6417c235827f14b1"}}]}], "golden_chunks": [{"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 9, "content": " // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new BalloonHashingFunction(m, i+1, p, l, v).toString());\n Assert.assertNotEquals(hashCode, new BalloonHashingFunction(m, i, p, l, v+1).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n Assert.assertFalse(notSameInst3);\n Assert.assertFalse(notSameInst4);\n Assert.assertFalse(notSameInst6);\n }\n\n}\n", "meta": {"hash_id": "e28126555538a2d83d9408547d8812c647f16fa1136a2d2c6417c235827f14b1"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 3, "content": " new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 1, 3, \"498344ee9d31baf82cc93ebb3874fe0b76e164302c1cefa1b63a90a69afb9b4d\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 4, 3, \"8a665611e40710ba1fd78c181549c750f17c12e423c11930ce997f04c7153e0c\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 1, 3, \"d9e33c683451b21fb3720afbd78bf12518c1d4401fa39f054b052a145c968bb1\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 16, 3, \"a67b383bb88a282aef595d98697f90820adf64582a4b3627c76b7da3d8bae915\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 1, 3, \"97a11df9382a788c781929831d409d3599e0b67ab452ef834718114efdcd1c6d\"},\n\n", "meta": {"hash_id": "8d66684082925a6677986182822b56a560f07eda563d5c2f2a3724e24ff60b92"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 1, "content": " private static final Object[][] TEST_VECTORS = new Object[][]{\n // Single thread\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 0, 3, \"716043dff777b44aa7b88dcbab12c078abecfac9d289c5b5195967aa63440dfb\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 0, 3, \"5f02f8206f9cd212485c6bdf85527b698956701ad0852106f94b94ee94577378\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 0, 3, \"20aa99d7fe3f4df4bd98c655c5480ec98b143107a331fd491deda885c4d6a6cc\"},\n new Object[]{\"\\0\", \"\\0\", \"SHA-256\", 3, 3, 0, 3, \"4fc7e302ffa29ae0eac31166cee7a552d1d71135f4e0da66486fb68a749b73a4\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 0, 3, \"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n", "meta": {"hash_id": "85c2d6457ddf473de3a5f52644e9db9c85d302be436533997c18dcd63ba1bccd"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 0, "content": "/*\n * (C) Copyright 2023 Password4j 
(http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\nimport com.password4j.types.Argon2;\nimport com.password4j.types.Bcrypt;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\n\npublic class BalloonHashingFunctionTest\n{\n\n", "meta": {"hash_id": "af40848c1dc0efcd0a2d4156691f1bd2842ccc2c86aab37fc9c4556c3f3fa76f"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/BalloonHashingFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "How can I obtain an instance of the BalloonHashingFunction using the factory method?", "answer": "You can obtain an instance of the BalloonHashingFunction using the `getInstance` factory method. It takes the same parameters as the constructor:\n`BalloonHashingFunction.getInstance(String hashAlgorithm, int space, int time, int parallelism, int delta)`\nThis is demonstrated in the `testInstance` method of the provided test class.", "golden_doc_uuids": ["b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad"], "golden_chunk_uuids": [["b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", 5]], "golden_documents": [{"uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "content": "/*\n * (C) Copyright 2023 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\nimport com.password4j.types.Argon2;\nimport com.password4j.types.Bcrypt;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\n\npublic class BalloonHashingFunctionTest\n{\n\n private static final Object[][] TEST_VECTORS = new Object[][]{\n // Single thread\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 0, 3, \"716043dff777b44aa7b88dcbab12c078abecfac9d289c5b5195967aa63440dfb\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 0, 3, \"5f02f8206f9cd212485c6bdf85527b698956701ad0852106f94b94ee94577378\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 0, 3, \"20aa99d7fe3f4df4bd98c655c5480ec98b143107a331fd491deda885c4d6a6cc\"},\n new Object[]{\"\\0\", \"\\0\", \"SHA-256\", 3, 3, 0, 3, \"4fc7e302ffa29ae0eac31166cee7a552d1d71135f4e0da66486fb68a749b73a4\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 0, 3, 
\"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n // Multiple threads\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 4, 3, \"1832bd8e5cbeba1cb174a13838095e7e66508e9bf04c40178990adbc8ba9eb6f\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 2, 3, \"f8767fe04059cef67b4427cda99bf8bcdd983959dbd399a5e63ea04523716c23\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 3, 3, \"bcad257eff3d1090b50276514857e60db5d0ec484129013ef3c88f7d36e438d6\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 1, 3, \"498344ee9d31baf82cc93ebb3874fe0b76e164302c1cefa1b63a90a69afb9b4d\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 4, 3, \"8a665611e40710ba1fd78c181549c750f17c12e423c11930ce997f04c7153e0c\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 1, 3, \"d9e33c683451b21fb3720afbd78bf12518c1d4401fa39f054b052a145c968bb1\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 16, 3, \"a67b383bb88a282aef595d98697f90820adf64582a4b3627c76b7da3d8bae915\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 1, 3, \"97a11df9382a788c781929831d409d3599e0b67ab452ef834718114efdcd1c6d\"},\n\n };\n\n\n @Test\n public void test()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = new BalloonHashingFunction((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n }\n\n }\n\n @Test\n public void testInstance()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = BalloonHashingFunction.getInstance((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash(((String) testVector[0]).getBytes(), ((String) testVector[1]).getBytes()).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n Assert.assertTrue(balloonHashingFunction.check(((String) testVector[0]).getBytes(), ((String) testVector[7]).getBytes(), ((String) testVector[1]).getBytes()));\n }\n\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String m = \"SHA-256\";\n int i = 2;\n int p = 3;\n int l = 4;\n int v = 5;\n BalloonHashingFunction balloonHashingFunction = BalloonHashingFunction.getInstance(m, i, p, l, v);\n\n // THEN\n boolean eqNull = balloonHashingFunction.equals(null);\n boolean eqClass = balloonHashingFunction.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = balloonHashingFunction.equals(BalloonHashingFunction.getInstance(m, i, p, l, v));\n boolean sameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v));\n String toString = balloonHashingFunction.toString();\n int hashCode = balloonHashingFunction.hashCode();\n boolean notSameInst1 = balloonHashingFunction.equals(new BalloonHashingFunction(\"SHA-512\", i, p, l, v));\n boolean notSameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i+1, p, l, 
v));\n boolean notSameInst3 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p+1, l, v));\n boolean notSameInst4 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l+1, v));\n boolean notSameInst6 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v+1));\n\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new BalloonHashingFunction(m, i+1, p, l, v).toString());\n Assert.assertNotEquals(hashCode, new BalloonHashingFunction(m, i, p, l, v+1).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n Assert.assertFalse(notSameInst3);\n Assert.assertFalse(notSameInst4);\n Assert.assertFalse(notSameInst6);\n }\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/BalloonHashingFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 0, "content": "/*\n * (C) Copyright 2023 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\nimport com.password4j.types.Argon2;\nimport com.password4j.types.Bcrypt;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\n\npublic class BalloonHashingFunctionTest\n{\n\n", "meta": {"hash_id": "af40848c1dc0efcd0a2d4156691f1bd2842ccc2c86aab37fc9c4556c3f3fa76f"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 1, "content": " private static final Object[][] TEST_VECTORS = new Object[][]{\n // Single thread\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 0, 3, \"716043dff777b44aa7b88dcbab12c078abecfac9d289c5b5195967aa63440dfb\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 0, 3, \"5f02f8206f9cd212485c6bdf85527b698956701ad0852106f94b94ee94577378\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 0, 3, \"20aa99d7fe3f4df4bd98c655c5480ec98b143107a331fd491deda885c4d6a6cc\"},\n new Object[]{\"\\0\", \"\\0\", \"SHA-256\", 3, 3, 0, 3, \"4fc7e302ffa29ae0eac31166cee7a552d1d71135f4e0da66486fb68a749b73a4\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 0, 3, \"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n", "meta": {"hash_id": "85c2d6457ddf473de3a5f52644e9db9c85d302be436533997c18dcd63ba1bccd"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 2, "content": " // Multiple threads\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 4, 3, \"1832bd8e5cbeba1cb174a13838095e7e66508e9bf04c40178990adbc8ba9eb6f\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 2, 3, \"f8767fe04059cef67b4427cda99bf8bcdd983959dbd399a5e63ea04523716c23\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 3, 3, 
\"bcad257eff3d1090b50276514857e60db5d0ec484129013ef3c88f7d36e438d6\"},\n", "meta": {"hash_id": "4003b157f9c69a665fdc78804da2d3494d3ed19d6cc58455c7d18a3c5ade0fd2"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 3, "content": " new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 1, 3, \"498344ee9d31baf82cc93ebb3874fe0b76e164302c1cefa1b63a90a69afb9b4d\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 4, 3, \"8a665611e40710ba1fd78c181549c750f17c12e423c11930ce997f04c7153e0c\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 1, 3, \"d9e33c683451b21fb3720afbd78bf12518c1d4401fa39f054b052a145c968bb1\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 16, 3, \"a67b383bb88a282aef595d98697f90820adf64582a4b3627c76b7da3d8bae915\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 1, 3, \"97a11df9382a788c781929831d409d3599e0b67ab452ef834718114efdcd1c6d\"},\n\n", "meta": {"hash_id": "8d66684082925a6677986182822b56a560f07eda563d5c2f2a3724e24ff60b92"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 4, "content": " };\n\n\n @Test\n public void test()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = new BalloonHashingFunction((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n }\n\n }\n\n @Test\n public void testInstance()\n {\n\n", "meta": {"hash_id": "44a2c27161419c688119eefac065aa485433352593d29e7bd7d82b5edb721c1a"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 5, "content": " BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = BalloonHashingFunction.getInstance((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash(((String) testVector[0]).getBytes(), ((String) testVector[1]).getBytes()).getResult());\n\n", "meta": {"hash_id": "61fa583d59b4a25230d4ef9d043fe334af4c97cde1428b395da9d201cd052a55"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 6, "content": " Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n Assert.assertTrue(balloonHashingFunction.check(((String) testVector[0]).getBytes(), ((String) testVector[7]).getBytes(), ((String) testVector[1]).getBytes()));\n }\n\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String m = \"SHA-256\";\n int i = 2;\n int p = 3;\n int l = 4;\n int v = 5;\n BalloonHashingFunction balloonHashingFunction = BalloonHashingFunction.getInstance(m, i, p, l, v);\n\n", "meta": {"hash_id": "b4140a9823115713bfcf8ea42baeed364a752e743362d8ed619e783eb5671401"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 7, "content": " // THEN\n boolean eqNull = balloonHashingFunction.equals(null);\n boolean eqClass = 
balloonHashingFunction.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = balloonHashingFunction.equals(BalloonHashingFunction.getInstance(m, i, p, l, v));\n boolean sameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v));\n String toString = balloonHashingFunction.toString();\n int hashCode = balloonHashingFunction.hashCode();\n boolean notSameInst1 = balloonHashingFunction.equals(new BalloonHashingFunction(\"SHA-512\", i, p, l, v));\n", "meta": {"hash_id": "ee04aecd18be25012e4d467b7c0311c833ada51047799dca5fb1896184a48b3e"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 8, "content": " boolean notSameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i+1, p, l, v));\n boolean notSameInst3 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p+1, l, v));\n boolean notSameInst4 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l+1, v));\n boolean notSameInst6 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v+1));\n\n", "meta": {"hash_id": "4a3f4afae4a30fc87ee97c3e45639bf85df026912bb3106dcaa3b788aae04179"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 9, "content": " // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new BalloonHashingFunction(m, i+1, p, l, v).toString());\n Assert.assertNotEquals(hashCode, new BalloonHashingFunction(m, i, p, l, v+1).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n Assert.assertFalse(notSameInst3);\n Assert.assertFalse(notSameInst4);\n Assert.assertFalse(notSameInst6);\n }\n\n}\n", "meta": {"hash_id": "e28126555538a2d83d9408547d8812c647f16fa1136a2d2c6417c235827f14b1"}}]}], "golden_chunks": [{"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 5, "content": " BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = BalloonHashingFunction.getInstance((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash(((String) testVector[0]).getBytes(), ((String) testVector[1]).getBytes()).getResult());\n\n", "meta": {"hash_id": "61fa583d59b4a25230d4ef9d043fe334af4c97cde1428b395da9d201cd052a55"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/BalloonHashingFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "What are the test vectors used in the `TEST_VECTORS` array?", "answer": "The `TEST_VECTORS` array contains a set of test vectors for the BalloonHashingFunction. 
Each test vector is an array of objects with the following elements:\n- password: A String representing the password to hash.\n- salt: A String representing the salt value.\n- hashAlgorithm: A String representing the hash algorithm to use (e.g., \"SHA-256\").\n- space: An integer representing the space cost.\n- time: An integer representing the time cost.\n- parallelism: An integer representing the parallelism factor.\n- delta: An integer representing the delta value.\n- expectedHash: A String representing the expected hash value for the given input parameters.\nThese test vectors are used in the `test` and `testInstance` methods to verify the correctness of the BalloonHashingFunction implementation.", "golden_doc_uuids": ["b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad"], "golden_chunk_uuids": [["b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", 1]], "golden_documents": [{"uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "content": "/*\n * (C) Copyright 2023 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\nimport com.password4j.types.Argon2;\nimport com.password4j.types.Bcrypt;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\n\npublic class BalloonHashingFunctionTest\n{\n\n private static final Object[][] TEST_VECTORS = new Object[][]{\n // Single thread\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 0, 3, \"716043dff777b44aa7b88dcbab12c078abecfac9d289c5b5195967aa63440dfb\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 0, 3, \"5f02f8206f9cd212485c6bdf85527b698956701ad0852106f94b94ee94577378\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 0, 3, \"20aa99d7fe3f4df4bd98c655c5480ec98b143107a331fd491deda885c4d6a6cc\"},\n new Object[]{\"\\0\", \"\\0\", \"SHA-256\", 3, 3, 0, 3, \"4fc7e302ffa29ae0eac31166cee7a552d1d71135f4e0da66486fb68a749b73a4\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 0, 3, \"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n // Multiple threads\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 4, 3, \"1832bd8e5cbeba1cb174a13838095e7e66508e9bf04c40178990adbc8ba9eb6f\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 2, 3, \"f8767fe04059cef67b4427cda99bf8bcdd983959dbd399a5e63ea04523716c23\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 3, 3, \"bcad257eff3d1090b50276514857e60db5d0ec484129013ef3c88f7d36e438d6\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 1, 3, \"498344ee9d31baf82cc93ebb3874fe0b76e164302c1cefa1b63a90a69afb9b4d\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 4, 3, \"8a665611e40710ba1fd78c181549c750f17c12e423c11930ce997f04c7153e0c\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 1, 3, \"d9e33c683451b21fb3720afbd78bf12518c1d4401fa39f054b052a145c968bb1\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 
1, 16, 3, \"a67b383bb88a282aef595d98697f90820adf64582a4b3627c76b7da3d8bae915\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 1, 3, \"97a11df9382a788c781929831d409d3599e0b67ab452ef834718114efdcd1c6d\"},\n\n };\n\n\n @Test\n public void test()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = new BalloonHashingFunction((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n }\n\n }\n\n @Test\n public void testInstance()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = BalloonHashingFunction.getInstance((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash(((String) testVector[0]).getBytes(), ((String) testVector[1]).getBytes()).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n Assert.assertTrue(balloonHashingFunction.check(((String) testVector[0]).getBytes(), ((String) testVector[7]).getBytes(), ((String) testVector[1]).getBytes()));\n }\n\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String m = \"SHA-256\";\n int i = 2;\n int p = 3;\n int l = 4;\n int v = 5;\n BalloonHashingFunction balloonHashingFunction = BalloonHashingFunction.getInstance(m, i, p, l, v);\n\n // THEN\n boolean eqNull = balloonHashingFunction.equals(null);\n boolean eqClass = balloonHashingFunction.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = balloonHashingFunction.equals(BalloonHashingFunction.getInstance(m, i, p, l, v));\n boolean sameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v));\n String toString = balloonHashingFunction.toString();\n int hashCode = balloonHashingFunction.hashCode();\n boolean notSameInst1 = balloonHashingFunction.equals(new BalloonHashingFunction(\"SHA-512\", i, p, l, v));\n boolean notSameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i+1, p, l, v));\n boolean notSameInst3 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p+1, l, v));\n boolean notSameInst4 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l+1, v));\n boolean notSameInst6 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v+1));\n\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new BalloonHashingFunction(m, i+1, p, l, v).toString());\n Assert.assertNotEquals(hashCode, new BalloonHashingFunction(m, i, p, l, v+1).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n Assert.assertFalse(notSameInst3);\n Assert.assertFalse(notSameInst4);\n Assert.assertFalse(notSameInst6);\n }\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/BalloonHashingFunctionTest.java", "repo_name": 
"Password4j/password4j", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 0, "content": "/*\n * (C) Copyright 2023 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\n\npackage com.password4j;\n\nimport com.password4j.types.Argon2;\nimport com.password4j.types.Bcrypt;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\n\npublic class BalloonHashingFunctionTest\n{\n\n", "meta": {"hash_id": "af40848c1dc0efcd0a2d4156691f1bd2842ccc2c86aab37fc9c4556c3f3fa76f"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 1, "content": " private static final Object[][] TEST_VECTORS = new Object[][]{\n // Single thread\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 0, 3, \"716043dff777b44aa7b88dcbab12c078abecfac9d289c5b5195967aa63440dfb\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 0, 3, \"5f02f8206f9cd212485c6bdf85527b698956701ad0852106f94b94ee94577378\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 0, 3, \"20aa99d7fe3f4df4bd98c655c5480ec98b143107a331fd491deda885c4d6a6cc\"},\n new Object[]{\"\\0\", \"\\0\", \"SHA-256\", 3, 3, 0, 3, \"4fc7e302ffa29ae0eac31166cee7a552d1d71135f4e0da66486fb68a749b73a4\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 0, 3, \"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n", "meta": {"hash_id": "85c2d6457ddf473de3a5f52644e9db9c85d302be436533997c18dcd63ba1bccd"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 2, "content": " // Multiple threads\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 4, 3, \"1832bd8e5cbeba1cb174a13838095e7e66508e9bf04c40178990adbc8ba9eb6f\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 2, 3, \"f8767fe04059cef67b4427cda99bf8bcdd983959dbd399a5e63ea04523716c23\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 3, 3, \"bcad257eff3d1090b50276514857e60db5d0ec484129013ef3c88f7d36e438d6\"},\n", "meta": {"hash_id": "4003b157f9c69a665fdc78804da2d3494d3ed19d6cc58455c7d18a3c5ade0fd2"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 3, "content": " new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 1, 3, \"498344ee9d31baf82cc93ebb3874fe0b76e164302c1cefa1b63a90a69afb9b4d\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 4, 3, \"8a665611e40710ba1fd78c181549c750f17c12e423c11930ce997f04c7153e0c\"},\n new Object[]{\"\\000\", \"\\000\", \"SHA-256\", 3, 3, 1, 3, \"d9e33c683451b21fb3720afbd78bf12518c1d4401fa39f054b052a145c968bb1\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 16, 3, \"a67b383bb88a282aef595d98697f90820adf64582a4b3627c76b7da3d8bae915\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 1, 3, \"97a11df9382a788c781929831d409d3599e0b67ab452ef834718114efdcd1c6d\"},\n\n", 
"meta": {"hash_id": "8d66684082925a6677986182822b56a560f07eda563d5c2f2a3724e24ff60b92"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 4, "content": " };\n\n\n @Test\n public void test()\n {\n\n BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = new BalloonHashingFunction((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n\n Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n }\n\n }\n\n @Test\n public void testInstance()\n {\n\n", "meta": {"hash_id": "44a2c27161419c688119eefac065aa485433352593d29e7bd7d82b5edb721c1a"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 5, "content": " BalloonHashingFunction balloonHashingFunction;\n for (Object[] testVector : TEST_VECTORS)\n {\n balloonHashingFunction = BalloonHashingFunction.getInstance((String) testVector[2], (Integer) testVector[3], (Integer) testVector[4], (Integer) testVector[5], (Integer) testVector[6]);\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash((String) testVector[0], (String) testVector[1]).getResult());\n Assert.assertEquals(testVector[7], balloonHashingFunction.hash(((String) testVector[0]).getBytes(), ((String) testVector[1]).getBytes()).getResult());\n\n", "meta": {"hash_id": "61fa583d59b4a25230d4ef9d043fe334af4c97cde1428b395da9d201cd052a55"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 6, "content": " Assert.assertTrue(balloonHashingFunction.check((String) testVector[0], (String) testVector[7], (String) testVector[1]));\n Assert.assertTrue(balloonHashingFunction.check(((String) testVector[0]).getBytes(), ((String) testVector[7]).getBytes(), ((String) testVector[1]).getBytes()));\n }\n\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String m = \"SHA-256\";\n int i = 2;\n int p = 3;\n int l = 4;\n int v = 5;\n BalloonHashingFunction balloonHashingFunction = BalloonHashingFunction.getInstance(m, i, p, l, v);\n\n", "meta": {"hash_id": "b4140a9823115713bfcf8ea42baeed364a752e743362d8ed619e783eb5671401"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 7, "content": " // THEN\n boolean eqNull = balloonHashingFunction.equals(null);\n boolean eqClass = balloonHashingFunction.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = balloonHashingFunction.equals(BalloonHashingFunction.getInstance(m, i, p, l, v));\n boolean sameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v));\n String toString = balloonHashingFunction.toString();\n int hashCode = balloonHashingFunction.hashCode();\n boolean notSameInst1 = balloonHashingFunction.equals(new BalloonHashingFunction(\"SHA-512\", i, p, l, v));\n", "meta": {"hash_id": "ee04aecd18be25012e4d467b7c0311c833ada51047799dca5fb1896184a48b3e"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 8, "content": " boolean notSameInst2 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i+1, p, l, v));\n boolean notSameInst3 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p+1, l, v));\n boolean notSameInst4 = balloonHashingFunction.equals(new 
BalloonHashingFunction(m, i, p, l+1, v));\n boolean notSameInst6 = balloonHashingFunction.equals(new BalloonHashingFunction(m, i, p, l, v+1));\n\n", "meta": {"hash_id": "4a3f4afae4a30fc87ee97c3e45639bf85df026912bb3106dcaa3b788aae04179"}}, {"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 9, "content": " // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new BalloonHashingFunction(m, i+1, p, l, v).toString());\n Assert.assertNotEquals(hashCode, new BalloonHashingFunction(m, i, p, l, v+1).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n Assert.assertFalse(notSameInst3);\n Assert.assertFalse(notSameInst4);\n Assert.assertFalse(notSameInst6);\n }\n\n}\n", "meta": {"hash_id": "e28126555538a2d83d9408547d8812c647f16fa1136a2d2c6417c235827f14b1"}}]}], "golden_chunks": [{"doc_uuid": "b80798210bd12005120f16aa5e3903f550597521ed42ea5f67a050954fbd78ad", "index": 1, "content": " private static final Object[][] TEST_VECTORS = new Object[][]{\n // Single thread\n new Object[]{\"hunter42\", \"examplesalt\", \"SHA-256\", 1024, 3, 0, 3, \"716043dff777b44aa7b88dcbab12c078abecfac9d289c5b5195967aa63440dfb\"},\n new Object[]{\"\", \"salt\", \"SHA-256\", 3, 3, 0, 3, \"5f02f8206f9cd212485c6bdf85527b698956701ad0852106f94b94ee94577378\"},\n new Object[]{\"password\", \"\", \"SHA-256\", 3, 3, 0, 3, \"20aa99d7fe3f4df4bd98c655c5480ec98b143107a331fd491deda885c4d6a6cc\"},\n new Object[]{\"\\0\", \"\\0\", \"SHA-256\", 3, 3, 0, 3, \"4fc7e302ffa29ae0eac31166cee7a552d1d71135f4e0da66486fb68a749b73a4\"},\n new Object[]{\"password\", \"salt\", \"SHA-256\", 1, 1, 0, 3, \"eefda4a8a75b461fa389c1dcfaf3e9dfacbc26f81f22e6f280d15cc18c417545\"},\n\n", "meta": {"hash_id": "85c2d6457ddf473de3a5f52644e9db9c85d302be436533997c18dcd63ba1bccd"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/BalloonHashingFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "What is the maximum digest size supported by this Blake2b implementation?", "answer": "The maximum digest size supported is 64 bytes. 
This can be seen in the constructor:\n\nif (digestSize < 1 || digestSize > 64)\n{\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n}\n", "golden_doc_uuids": ["cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25"], "golden_chunk_uuids": [["cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", 3]], "golden_documents": [{"uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\n\n\nclass Blake2b\n{\n private static final long[] IV = { 0x6a09e667f3bcc908L, 0xbb67ae8584caa73bL, 0x3c6ef372fe94f82bL, 0xa54ff53a5f1d36f1L,\n 0x510e527fade682d1L, 0x9b05688c2b3e6c1fL, 0x1f83d9abfb41bd6bL, 0x5be0cd19137e2179L };\n\n private static final byte[][] SIGMA = { { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 }, { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },\n { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 }, { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },\n { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 }, { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },\n { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 }, { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },\n { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 } };\n\n private static final int ROUNDS = 12;\n\n private static final int BLOCK_LENGTH_BYTES = 128;\n\n private final int digestLength;\n\n private final int keyLength;\n\n private final byte[] buffer;\n\n private final long[] internalState = new long[16];\n\n private int bufferPos = 0;\n\n private long[] chainValue = null;\n\n private long t0 = 0L;\n\n private long t1 = 0L;\n\n private long f0 = 0L;\n\n /**\n * Basic sized constructor - size in bytes.\n *\n * @param digestSize size of the digest in bytes\n */\n Blake2b(int digestSize)\n {\n if (digestSize < 1 || digestSize > 64)\n {\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n }\n\n buffer = new byte[BLOCK_LENGTH_BYTES];\n keyLength = 0;\n this.digestLength = digestSize;\n init();\n }\n\n // initialize chainValue\n private void init()\n {\n chainValue = new long[8];\n chainValue[0] = IV[0] ^ (digestLength | ((long) keyLength << 8) | 0x1010000);\n chainValue[1] = IV[1];\n chainValue[2] = IV[2];\n chainValue[3] = IV[3];\n chainValue[4] = IV[4];\n chainValue[5] = IV[5];\n chainValue[6] = IV[6];\n chainValue[7] = IV[7];\n }\n\n private void initializeInternalState()\n {\n System.arraycopy(chainValue, 0, internalState, 0, chainValue.length);\n System.arraycopy(IV, 0, internalState, chainValue.length, 4);\n internalState[12] = t0 ^ IV[4];\n internalState[13] = t1 ^ IV[5];\n 
internalState[14] = f0 ^ IV[6];\n internalState[15] = IV[7];// ^ f1 with f1 = 0\n }\n\n void update(byte[] message)\n {\n if (message == null)\n {\n return;\n }\n update(message, 0, message.length);\n }\n\n /**\n * update the message digest with a block of bytes.\n *\n * @param message the byte array containing the data.\n * @param offset the offset into the byte array where the data starts.\n * @param len the length of the data.\n */\n void update(byte[] message, int offset, int len)\n {\n int remainingLength = 0;\n\n if (bufferPos != 0)\n {\n remainingLength = BLOCK_LENGTH_BYTES - bufferPos;\n if (remainingLength < len)\n {\n System.arraycopy(message, offset, buffer, bufferPos, remainingLength);\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n bufferPos = 0;\n Arrays.fill(buffer, (byte) 0);// clear buffer\n }\n else\n {\n System.arraycopy(message, offset, buffer, bufferPos, len);\n bufferPos += len;\n return;\n }\n }\n\n int messagePos;\n int blockWiseLastPos = offset + len - BLOCK_LENGTH_BYTES;\n for (messagePos = offset + remainingLength; messagePos < blockWiseLastPos; messagePos += BLOCK_LENGTH_BYTES)\n {\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(message, messagePos);\n }\n\n // fill the buffer with left bytes, this might be a full block\n System.arraycopy(message, messagePos, buffer, 0, offset + len - messagePos);\n bufferPos += offset + len - messagePos;\n }\n\n /**\n * close the digest, producing the final digest value. The doFinal\n * call leaves the digest reset.\n * Key, salt and personal string remain.\n *\n * @param out the array the digest is to be copied into.\n * @param outOffset the offset into the out array the digest is to start at.\n */\n void doFinal(byte[] out, int outOffset)\n {\n\n f0 = 0xFFFFFFFFFFFFFFFFL;\n t0 += bufferPos;\n if (bufferPos > 0 && t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n Arrays.fill(buffer, (byte) 0);// Holds eventually the key if input is null\n Arrays.fill(internalState, 0L);\n\n for (int i = 0; i < chainValue.length && (i * 8 < digestLength); i++)\n {\n byte[] bytes = Utils.longToLittleEndian(chainValue[i]);\n\n if (i * 8 < digestLength - 8)\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, 8);\n }\n else\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, digestLength - (i * 8));\n }\n }\n\n Arrays.fill(chainValue, 0L);\n\n reset();\n }\n\n /**\n * Reset the digest back to it's initial state.\n * The key, the salt and the personal string will\n * remain for further computations.\n */\n void reset()\n {\n bufferPos = 0;\n f0 = 0L;\n t0 = 0L;\n t1 = 0L;\n chainValue = null;\n Arrays.fill(buffer, (byte) 0);\n init();\n }\n\n private void compress(byte[] message, int messagePos)\n {\n\n initializeInternalState();\n\n long[] m = new long[16];\n for (int j = 0; j < 16; j++)\n {\n m[j] = Utils.littleEndianToLong(message, messagePos + j * 8);\n }\n\n for (int round = 0; round < ROUNDS; round++)\n {\n\n // G apply to columns of internalState:m[blake2b_sigma[round][2 *\n // blockPos]] /+1\n functionG(m[SIGMA[round][0]], m[SIGMA[round][1]], 0, 4, 8, 12);\n functionG(m[SIGMA[round][2]], m[SIGMA[round][3]], 1, 5, 9, 13);\n functionG(m[SIGMA[round][4]], m[SIGMA[round][5]], 2, 6, 10, 14);\n functionG(m[SIGMA[round][6]], m[SIGMA[round][7]], 3, 7, 11, 15);\n // G apply to diagonals of internalState:\n functionG(m[SIGMA[round][8]], m[SIGMA[round][9]], 0, 5, 10, 15);\n functionG(m[SIGMA[round][10]], m[SIGMA[round][11]], 1, 6, 11, 12);\n functionG(m[SIGMA[round][12]], 
m[SIGMA[round][13]], 2, 7, 8, 13);\n functionG(m[SIGMA[round][14]], m[SIGMA[round][15]], 3, 4, 9, 14);\n }\n\n // update chain values:\n for (int offset = 0; offset < chainValue.length; offset++)\n {\n chainValue[offset] = chainValue[offset] ^ internalState[offset] ^ internalState[offset + 8];\n }\n }\n\n private void functionG(long m1, long m2, int posA, int posB, int posC, int posD)\n {\n\n internalState[posA] = internalState[posA] + internalState[posB] + m1;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 32);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 24); // replaces 25 of BLAKE\n internalState[posA] = internalState[posA] + internalState[posB] + m2;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 16);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 63); // replaces 11 of BLAKE\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/Blake2b.java", "repo_name": "Password4j/password4j", "num_chunks": 17, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\n\n\nclass Blake2b\n{\n private static final long[] IV = { 0x6a09e667f3bcc908L, 0xbb67ae8584caa73bL, 0x3c6ef372fe94f82bL, 0xa54ff53a5f1d36f1L,\n 0x510e527fade682d1L, 0x9b05688c2b3e6c1fL, 0x1f83d9abfb41bd6bL, 0x5be0cd19137e2179L };\n\n", "meta": {"hash_id": "030f05fa092404cbb0bf8443b12cb112715b629135d50a6336ccebf563a0c2c0"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 1, "content": " private static final byte[][] SIGMA = { { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 }, { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },\n { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 }, { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },\n { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 }, { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },\n { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 }, { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },\n { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 } };\n\n", "meta": {"hash_id": "7ac872c21aa7697594c7032f159ff0d3b481ba3598d7bb6804292c11d79b4003"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 2, "content": " private static final int ROUNDS = 12;\n\n private static final int BLOCK_LENGTH_BYTES = 
128;\n\n private final int digestLength;\n\n private final int keyLength;\n\n private final byte[] buffer;\n\n private final long[] internalState = new long[16];\n\n private int bufferPos = 0;\n\n private long[] chainValue = null;\n\n private long t0 = 0L;\n\n private long t1 = 0L;\n\n private long f0 = 0L;\n\n", "meta": {"hash_id": "868fb873412b06a4193016e41f4ab1d48bb645090abbc4c075c3b87580824632"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 3, "content": " /**\n * Basic sized constructor - size in bytes.\n *\n * @param digestSize size of the digest in bytes\n */\n Blake2b(int digestSize)\n {\n if (digestSize < 1 || digestSize > 64)\n {\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n }\n\n buffer = new byte[BLOCK_LENGTH_BYTES];\n keyLength = 0;\n this.digestLength = digestSize;\n init();\n }\n\n", "meta": {"hash_id": "487cd37a4741b2d8bb79276c84327a44c03f02ea01ca93cb0f3c078224a1b532"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 4, "content": " // initialize chainValue\n private void init()\n {\n chainValue = new long[8];\n chainValue[0] = IV[0] ^ (digestLength | ((long) keyLength << 8) | 0x1010000);\n chainValue[1] = IV[1];\n chainValue[2] = IV[2];\n chainValue[3] = IV[3];\n chainValue[4] = IV[4];\n chainValue[5] = IV[5];\n chainValue[6] = IV[6];\n chainValue[7] = IV[7];\n }\n\n", "meta": {"hash_id": "2864c1ca855f95ad345484d35073ff779f0e627ffd14fbe01e62dd3a4b041149"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 5, "content": " private void initializeInternalState()\n {\n System.arraycopy(chainValue, 0, internalState, 0, chainValue.length);\n System.arraycopy(IV, 0, internalState, chainValue.length, 4);\n internalState[12] = t0 ^ IV[4];\n internalState[13] = t1 ^ IV[5];\n internalState[14] = f0 ^ IV[6];\n internalState[15] = IV[7];// ^ f1 with f1 = 0\n }\n\n", "meta": {"hash_id": "6af50bcad7c7162586aab5f53314a8fdb4ce66a2767266bf8467e9d676348707"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 6, "content": " void update(byte[] message)\n {\n if (message == null)\n {\n return;\n }\n update(message, 0, message.length);\n }\n\n /**\n * update the message digest with a block of bytes.\n *\n * @param message the byte array containing the data.\n * @param offset the offset into the byte array where the data starts.\n * @param len the length of the data.\n */\n void update(byte[] message, int offset, int len)\n {\n int remainingLength = 0;\n\n", "meta": {"hash_id": "519cf7f6ee589c0e7c4b1bbe1339a7371e556810f3421240c7c9fede8eefb1f7"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 7, "content": " if (bufferPos != 0)\n {\n remainingLength = BLOCK_LENGTH_BYTES - bufferPos;\n if (remainingLength < len)\n {\n System.arraycopy(message, offset, buffer, bufferPos, remainingLength);\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n bufferPos = 0;\n Arrays.fill(buffer, (byte) 0);// clear buffer\n }\n else\n {\n System.arraycopy(message, offset, buffer, bufferPos, len);\n bufferPos += len;\n return;\n }\n }\n\n", "meta": {"hash_id": "414938084cea2f5335cfd48f8b3c7a62b1a710f908003424757efe3a1758dc4a"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 8, "content": " int messagePos;\n int blockWiseLastPos = offset + len - BLOCK_LENGTH_BYTES;\n for (messagePos 
= offset + remainingLength; messagePos < blockWiseLastPos; messagePos += BLOCK_LENGTH_BYTES)\n {\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(message, messagePos);\n }\n\n", "meta": {"hash_id": "23eaff22bb9f798e6e64a870209a0cbbdbd2e8539ed76f33a57767163645bbfa"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 9, "content": " // fill the buffer with left bytes, this might be a full block\n System.arraycopy(message, messagePos, buffer, 0, offset + len - messagePos);\n bufferPos += offset + len - messagePos;\n }\n\n /**\n * close the digest, producing the final digest value. The doFinal\n * call leaves the digest reset.\n * Key, salt and personal string remain.\n *\n * @param out the array the digest is to be copied into.\n * @param outOffset the offset into the out array the digest is to start at.\n */\n void doFinal(byte[] out, int outOffset)\n {\n\n", "meta": {"hash_id": "a006b54a79da16c523aa73e4acfaf0af027b702d53d04ff4f06f6771ed7a7aeb"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 10, "content": " f0 = 0xFFFFFFFFFFFFFFFFL;\n t0 += bufferPos;\n if (bufferPos > 0 && t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n Arrays.fill(buffer, (byte) 0);// Holds eventually the key if input is null\n Arrays.fill(internalState, 0L);\n\n for (int i = 0; i < chainValue.length && (i * 8 < digestLength); i++)\n {\n byte[] bytes = Utils.longToLittleEndian(chainValue[i]);\n\n", "meta": {"hash_id": "bb76de8510c375643cb6e697a9ae1da10ab32439ba52f226329138d0cdf800fd"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 11, "content": " if (i * 8 < digestLength - 8)\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, 8);\n }\n else\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, digestLength - (i * 8));\n }\n }\n\n Arrays.fill(chainValue, 0L);\n\n reset();\n }\n\n", "meta": {"hash_id": "a41730ca45c1a08b287c459887ed58052cba41f3ff4aaa6c8e6ece0d68d47901"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 12, "content": " /**\n * Reset the digest back to it's initial state.\n * The key, the salt and the personal string will\n * remain for further computations.\n */\n void reset()\n {\n bufferPos = 0;\n f0 = 0L;\n t0 = 0L;\n t1 = 0L;\n chainValue = null;\n Arrays.fill(buffer, (byte) 0);\n init();\n }\n\n", "meta": {"hash_id": "cf471747123c399c4bb07734a395db899cbede902c8556b730408612fa1a5232"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 13, "content": " private void compress(byte[] message, int messagePos)\n {\n\n initializeInternalState();\n\n long[] m = new long[16];\n for (int j = 0; j < 16; j++)\n {\n m[j] = Utils.littleEndianToLong(message, messagePos + j * 8);\n }\n\n for (int round = 0; round < ROUNDS; round++)\n {\n\n", "meta": {"hash_id": "b255faa394dbfe3b190c4dda7a092d4691dd7e4803e962568114baa8f15e53cc"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 14, "content": " // G apply to columns of internalState:m[blake2b_sigma[round][2 *\n // blockPos]] /+1\n functionG(m[SIGMA[round][0]], m[SIGMA[round][1]], 0, 4, 8, 12);\n functionG(m[SIGMA[round][2]], m[SIGMA[round][3]], 1, 5, 9, 13);\n functionG(m[SIGMA[round][4]], m[SIGMA[round][5]], 2, 6, 10, 14);\n functionG(m[SIGMA[round][6]], m[SIGMA[round][7]], 3, 7, 11, 15);\n // G apply to diagonals of internalState:\n functionG(m[SIGMA[round][8]], m[SIGMA[round][9]], 0, 
5, 10, 15);\n functionG(m[SIGMA[round][10]], m[SIGMA[round][11]], 1, 6, 11, 12);\n functionG(m[SIGMA[round][12]], m[SIGMA[round][13]], 2, 7, 8, 13);\n functionG(m[SIGMA[round][14]], m[SIGMA[round][15]], 3, 4, 9, 14);\n }\n\n", "meta": {"hash_id": "c62b55ab9d065eeaa641328c238bd250315fc5ecd20e5e7e33bf3f140faf4d4b"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 15, "content": " // update chain values:\n for (int offset = 0; offset < chainValue.length; offset++)\n {\n chainValue[offset] = chainValue[offset] ^ internalState[offset] ^ internalState[offset + 8];\n }\n }\n\n private void functionG(long m1, long m2, int posA, int posB, int posC, int posD)\n {\n\n", "meta": {"hash_id": "eafb4bb7590170198f6cec0d051c9221458c1962d3a0fb1dce4e837787ada236"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 16, "content": " internalState[posA] = internalState[posA] + internalState[posB] + m1;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 32);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 24); // replaces 25 of BLAKE\n internalState[posA] = internalState[posA] + internalState[posB] + m2;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 16);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 63); // replaces 11 of BLAKE\n }\n}\n", "meta": {"hash_id": "c5e9d3fb067fd14d929348bfeb795af6cf6cf3415c60772bc701b84cf5e2e959"}}]}], "golden_chunks": [{"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 3, "content": " /**\n * Basic sized constructor - size in bytes.\n *\n * @param digestSize size of the digest in bytes\n */\n Blake2b(int digestSize)\n {\n if (digestSize < 1 || digestSize > 64)\n {\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n }\n\n buffer = new byte[BLOCK_LENGTH_BYTES];\n keyLength = 0;\n this.digestLength = digestSize;\n init();\n }\n\n", "meta": {"hash_id": "487cd37a4741b2d8bb79276c84327a44c03f02ea01ca93cb0f3c078224a1b532"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/Blake2b.java", "repo_name": "Password4j/password4j", "num_chunks": 17, "source": "codebases/jsonl"}} +{"query": "How do you reset the hasher to its initial state?", "answer": "To reset the hasher to its initial state, call the `reset` method on the Blake2b instance:\n\nblake2b.reset();\n\nThis will clear the internal buffers and state, preparing the hasher for a new hashing operation. 
Any previously configured parameters such as the key, salt, and personal string will remain intact for further computations.", "golden_doc_uuids": ["cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25"], "golden_chunk_uuids": [["cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", 11], ["cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", 10], ["cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", 7], ["cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", 3]], "golden_documents": [{"uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\n\n\nclass Blake2b\n{\n private static final long[] IV = { 0x6a09e667f3bcc908L, 0xbb67ae8584caa73bL, 0x3c6ef372fe94f82bL, 0xa54ff53a5f1d36f1L,\n 0x510e527fade682d1L, 0x9b05688c2b3e6c1fL, 0x1f83d9abfb41bd6bL, 0x5be0cd19137e2179L };\n\n private static final byte[][] SIGMA = { { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 }, { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },\n { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 }, { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },\n { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 }, { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },\n { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 }, { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },\n { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 } };\n\n private static final int ROUNDS = 12;\n\n private static final int BLOCK_LENGTH_BYTES = 128;\n\n private final int digestLength;\n\n private final int keyLength;\n\n private final byte[] buffer;\n\n private final long[] internalState = new long[16];\n\n private int bufferPos = 0;\n\n private long[] chainValue = null;\n\n private long t0 = 0L;\n\n private long t1 = 0L;\n\n private long f0 = 0L;\n\n /**\n * Basic sized constructor - size in bytes.\n *\n * @param digestSize size of the digest in bytes\n */\n Blake2b(int digestSize)\n {\n if (digestSize < 1 || digestSize > 64)\n {\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n }\n\n buffer = new byte[BLOCK_LENGTH_BYTES];\n keyLength = 0;\n this.digestLength = digestSize;\n init();\n }\n\n // initialize chainValue\n private void init()\n {\n chainValue = new long[8];\n chainValue[0] = IV[0] ^ (digestLength | ((long) keyLength << 8) | 0x1010000);\n chainValue[1] = IV[1];\n chainValue[2] = IV[2];\n chainValue[3] = IV[3];\n chainValue[4] = IV[4];\n chainValue[5] = IV[5];\n chainValue[6] = IV[6];\n chainValue[7] = IV[7];\n }\n\n private void initializeInternalState()\n {\n System.arraycopy(chainValue, 0, 
internalState, 0, chainValue.length);\n System.arraycopy(IV, 0, internalState, chainValue.length, 4);\n internalState[12] = t0 ^ IV[4];\n internalState[13] = t1 ^ IV[5];\n internalState[14] = f0 ^ IV[6];\n internalState[15] = IV[7];// ^ f1 with f1 = 0\n }\n\n void update(byte[] message)\n {\n if (message == null)\n {\n return;\n }\n update(message, 0, message.length);\n }\n\n /**\n * update the message digest with a block of bytes.\n *\n * @param message the byte array containing the data.\n * @param offset the offset into the byte array where the data starts.\n * @param len the length of the data.\n */\n void update(byte[] message, int offset, int len)\n {\n int remainingLength = 0;\n\n if (bufferPos != 0)\n {\n remainingLength = BLOCK_LENGTH_BYTES - bufferPos;\n if (remainingLength < len)\n {\n System.arraycopy(message, offset, buffer, bufferPos, remainingLength);\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n bufferPos = 0;\n Arrays.fill(buffer, (byte) 0);// clear buffer\n }\n else\n {\n System.arraycopy(message, offset, buffer, bufferPos, len);\n bufferPos += len;\n return;\n }\n }\n\n int messagePos;\n int blockWiseLastPos = offset + len - BLOCK_LENGTH_BYTES;\n for (messagePos = offset + remainingLength; messagePos < blockWiseLastPos; messagePos += BLOCK_LENGTH_BYTES)\n {\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(message, messagePos);\n }\n\n // fill the buffer with left bytes, this might be a full block\n System.arraycopy(message, messagePos, buffer, 0, offset + len - messagePos);\n bufferPos += offset + len - messagePos;\n }\n\n /**\n * close the digest, producing the final digest value. The doFinal\n * call leaves the digest reset.\n * Key, salt and personal string remain.\n *\n * @param out the array the digest is to be copied into.\n * @param outOffset the offset into the out array the digest is to start at.\n */\n void doFinal(byte[] out, int outOffset)\n {\n\n f0 = 0xFFFFFFFFFFFFFFFFL;\n t0 += bufferPos;\n if (bufferPos > 0 && t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n Arrays.fill(buffer, (byte) 0);// Holds eventually the key if input is null\n Arrays.fill(internalState, 0L);\n\n for (int i = 0; i < chainValue.length && (i * 8 < digestLength); i++)\n {\n byte[] bytes = Utils.longToLittleEndian(chainValue[i]);\n\n if (i * 8 < digestLength - 8)\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, 8);\n }\n else\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, digestLength - (i * 8));\n }\n }\n\n Arrays.fill(chainValue, 0L);\n\n reset();\n }\n\n /**\n * Reset the digest back to it's initial state.\n * The key, the salt and the personal string will\n * remain for further computations.\n */\n void reset()\n {\n bufferPos = 0;\n f0 = 0L;\n t0 = 0L;\n t1 = 0L;\n chainValue = null;\n Arrays.fill(buffer, (byte) 0);\n init();\n }\n\n private void compress(byte[] message, int messagePos)\n {\n\n initializeInternalState();\n\n long[] m = new long[16];\n for (int j = 0; j < 16; j++)\n {\n m[j] = Utils.littleEndianToLong(message, messagePos + j * 8);\n }\n\n for (int round = 0; round < ROUNDS; round++)\n {\n\n // G apply to columns of internalState:m[blake2b_sigma[round][2 *\n // blockPos]] /+1\n functionG(m[SIGMA[round][0]], m[SIGMA[round][1]], 0, 4, 8, 12);\n functionG(m[SIGMA[round][2]], m[SIGMA[round][3]], 1, 5, 9, 13);\n functionG(m[SIGMA[round][4]], m[SIGMA[round][5]], 2, 6, 10, 14);\n functionG(m[SIGMA[round][6]], m[SIGMA[round][7]], 3, 7, 11, 15);\n // G apply to diagonals of internalState:\n 
functionG(m[SIGMA[round][8]], m[SIGMA[round][9]], 0, 5, 10, 15);\n functionG(m[SIGMA[round][10]], m[SIGMA[round][11]], 1, 6, 11, 12);\n functionG(m[SIGMA[round][12]], m[SIGMA[round][13]], 2, 7, 8, 13);\n functionG(m[SIGMA[round][14]], m[SIGMA[round][15]], 3, 4, 9, 14);\n }\n\n // update chain values:\n for (int offset = 0; offset < chainValue.length; offset++)\n {\n chainValue[offset] = chainValue[offset] ^ internalState[offset] ^ internalState[offset + 8];\n }\n }\n\n private void functionG(long m1, long m2, int posA, int posB, int posC, int posD)\n {\n\n internalState[posA] = internalState[posA] + internalState[posB] + m1;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 32);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 24); // replaces 25 of BLAKE\n internalState[posA] = internalState[posA] + internalState[posB] + m2;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 16);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 63); // replaces 11 of BLAKE\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/Blake2b.java", "repo_name": "Password4j/password4j", "num_chunks": 17, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\n\n\nclass Blake2b\n{\n private static final long[] IV = { 0x6a09e667f3bcc908L, 0xbb67ae8584caa73bL, 0x3c6ef372fe94f82bL, 0xa54ff53a5f1d36f1L,\n 0x510e527fade682d1L, 0x9b05688c2b3e6c1fL, 0x1f83d9abfb41bd6bL, 0x5be0cd19137e2179L };\n\n", "meta": {"hash_id": "030f05fa092404cbb0bf8443b12cb112715b629135d50a6336ccebf563a0c2c0"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 1, "content": " private static final byte[][] SIGMA = { { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 }, { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },\n { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 }, { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },\n { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 }, { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },\n { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 }, { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },\n { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },\n { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 } };\n\n", "meta": {"hash_id": "7ac872c21aa7697594c7032f159ff0d3b481ba3598d7bb6804292c11d79b4003"}}, {"doc_uuid": 
"cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 2, "content": " private static final int ROUNDS = 12;\n\n private static final int BLOCK_LENGTH_BYTES = 128;\n\n private final int digestLength;\n\n private final int keyLength;\n\n private final byte[] buffer;\n\n private final long[] internalState = new long[16];\n\n private int bufferPos = 0;\n\n private long[] chainValue = null;\n\n private long t0 = 0L;\n\n private long t1 = 0L;\n\n private long f0 = 0L;\n\n", "meta": {"hash_id": "868fb873412b06a4193016e41f4ab1d48bb645090abbc4c075c3b87580824632"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 3, "content": " /**\n * Basic sized constructor - size in bytes.\n *\n * @param digestSize size of the digest in bytes\n */\n Blake2b(int digestSize)\n {\n if (digestSize < 1 || digestSize > 64)\n {\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n }\n\n buffer = new byte[BLOCK_LENGTH_BYTES];\n keyLength = 0;\n this.digestLength = digestSize;\n init();\n }\n\n", "meta": {"hash_id": "487cd37a4741b2d8bb79276c84327a44c03f02ea01ca93cb0f3c078224a1b532"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 4, "content": " // initialize chainValue\n private void init()\n {\n chainValue = new long[8];\n chainValue[0] = IV[0] ^ (digestLength | ((long) keyLength << 8) | 0x1010000);\n chainValue[1] = IV[1];\n chainValue[2] = IV[2];\n chainValue[3] = IV[3];\n chainValue[4] = IV[4];\n chainValue[5] = IV[5];\n chainValue[6] = IV[6];\n chainValue[7] = IV[7];\n }\n\n", "meta": {"hash_id": "2864c1ca855f95ad345484d35073ff779f0e627ffd14fbe01e62dd3a4b041149"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 5, "content": " private void initializeInternalState()\n {\n System.arraycopy(chainValue, 0, internalState, 0, chainValue.length);\n System.arraycopy(IV, 0, internalState, chainValue.length, 4);\n internalState[12] = t0 ^ IV[4];\n internalState[13] = t1 ^ IV[5];\n internalState[14] = f0 ^ IV[6];\n internalState[15] = IV[7];// ^ f1 with f1 = 0\n }\n\n", "meta": {"hash_id": "6af50bcad7c7162586aab5f53314a8fdb4ce66a2767266bf8467e9d676348707"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 6, "content": " void update(byte[] message)\n {\n if (message == null)\n {\n return;\n }\n update(message, 0, message.length);\n }\n\n /**\n * update the message digest with a block of bytes.\n *\n * @param message the byte array containing the data.\n * @param offset the offset into the byte array where the data starts.\n * @param len the length of the data.\n */\n void update(byte[] message, int offset, int len)\n {\n int remainingLength = 0;\n\n", "meta": {"hash_id": "519cf7f6ee589c0e7c4b1bbe1339a7371e556810f3421240c7c9fede8eefb1f7"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 7, "content": " if (bufferPos != 0)\n {\n remainingLength = BLOCK_LENGTH_BYTES - bufferPos;\n if (remainingLength < len)\n {\n System.arraycopy(message, offset, buffer, bufferPos, remainingLength);\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n bufferPos = 0;\n Arrays.fill(buffer, (byte) 0);// clear buffer\n }\n else\n {\n System.arraycopy(message, offset, buffer, bufferPos, len);\n bufferPos += len;\n return;\n }\n }\n\n", "meta": {"hash_id": "414938084cea2f5335cfd48f8b3c7a62b1a710f908003424757efe3a1758dc4a"}}, {"doc_uuid": 
"cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 8, "content": " int messagePos;\n int blockWiseLastPos = offset + len - BLOCK_LENGTH_BYTES;\n for (messagePos = offset + remainingLength; messagePos < blockWiseLastPos; messagePos += BLOCK_LENGTH_BYTES)\n {\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(message, messagePos);\n }\n\n", "meta": {"hash_id": "23eaff22bb9f798e6e64a870209a0cbbdbd2e8539ed76f33a57767163645bbfa"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 9, "content": " // fill the buffer with left bytes, this might be a full block\n System.arraycopy(message, messagePos, buffer, 0, offset + len - messagePos);\n bufferPos += offset + len - messagePos;\n }\n\n /**\n * close the digest, producing the final digest value. The doFinal\n * call leaves the digest reset.\n * Key, salt and personal string remain.\n *\n * @param out the array the digest is to be copied into.\n * @param outOffset the offset into the out array the digest is to start at.\n */\n void doFinal(byte[] out, int outOffset)\n {\n\n", "meta": {"hash_id": "a006b54a79da16c523aa73e4acfaf0af027b702d53d04ff4f06f6771ed7a7aeb"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 10, "content": " f0 = 0xFFFFFFFFFFFFFFFFL;\n t0 += bufferPos;\n if (bufferPos > 0 && t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n Arrays.fill(buffer, (byte) 0);// Holds eventually the key if input is null\n Arrays.fill(internalState, 0L);\n\n for (int i = 0; i < chainValue.length && (i * 8 < digestLength); i++)\n {\n byte[] bytes = Utils.longToLittleEndian(chainValue[i]);\n\n", "meta": {"hash_id": "bb76de8510c375643cb6e697a9ae1da10ab32439ba52f226329138d0cdf800fd"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 11, "content": " if (i * 8 < digestLength - 8)\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, 8);\n }\n else\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, digestLength - (i * 8));\n }\n }\n\n Arrays.fill(chainValue, 0L);\n\n reset();\n }\n\n", "meta": {"hash_id": "a41730ca45c1a08b287c459887ed58052cba41f3ff4aaa6c8e6ece0d68d47901"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 12, "content": " /**\n * Reset the digest back to it's initial state.\n * The key, the salt and the personal string will\n * remain for further computations.\n */\n void reset()\n {\n bufferPos = 0;\n f0 = 0L;\n t0 = 0L;\n t1 = 0L;\n chainValue = null;\n Arrays.fill(buffer, (byte) 0);\n init();\n }\n\n", "meta": {"hash_id": "cf471747123c399c4bb07734a395db899cbede902c8556b730408612fa1a5232"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 13, "content": " private void compress(byte[] message, int messagePos)\n {\n\n initializeInternalState();\n\n long[] m = new long[16];\n for (int j = 0; j < 16; j++)\n {\n m[j] = Utils.littleEndianToLong(message, messagePos + j * 8);\n }\n\n for (int round = 0; round < ROUNDS; round++)\n {\n\n", "meta": {"hash_id": "b255faa394dbfe3b190c4dda7a092d4691dd7e4803e962568114baa8f15e53cc"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 14, "content": " // G apply to columns of internalState:m[blake2b_sigma[round][2 *\n // blockPos]] /+1\n functionG(m[SIGMA[round][0]], m[SIGMA[round][1]], 0, 4, 8, 12);\n functionG(m[SIGMA[round][2]], m[SIGMA[round][3]], 1, 5, 9, 13);\n functionG(m[SIGMA[round][4]], 
m[SIGMA[round][5]], 2, 6, 10, 14);\n functionG(m[SIGMA[round][6]], m[SIGMA[round][7]], 3, 7, 11, 15);\n // G apply to diagonals of internalState:\n functionG(m[SIGMA[round][8]], m[SIGMA[round][9]], 0, 5, 10, 15);\n functionG(m[SIGMA[round][10]], m[SIGMA[round][11]], 1, 6, 11, 12);\n functionG(m[SIGMA[round][12]], m[SIGMA[round][13]], 2, 7, 8, 13);\n functionG(m[SIGMA[round][14]], m[SIGMA[round][15]], 3, 4, 9, 14);\n }\n\n", "meta": {"hash_id": "c62b55ab9d065eeaa641328c238bd250315fc5ecd20e5e7e33bf3f140faf4d4b"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 15, "content": " // update chain values:\n for (int offset = 0; offset < chainValue.length; offset++)\n {\n chainValue[offset] = chainValue[offset] ^ internalState[offset] ^ internalState[offset + 8];\n }\n }\n\n private void functionG(long m1, long m2, int posA, int posB, int posC, int posD)\n {\n\n", "meta": {"hash_id": "eafb4bb7590170198f6cec0d051c9221458c1962d3a0fb1dce4e837787ada236"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 16, "content": " internalState[posA] = internalState[posA] + internalState[posB] + m1;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 32);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 24); // replaces 25 of BLAKE\n internalState[posA] = internalState[posA] + internalState[posB] + m2;\n internalState[posD] = Long.rotateRight(internalState[posD] ^ internalState[posA], 16);\n internalState[posC] = internalState[posC] + internalState[posD];\n internalState[posB] = Long.rotateRight(internalState[posB] ^ internalState[posC], 63); // replaces 11 of BLAKE\n }\n}\n", "meta": {"hash_id": "c5e9d3fb067fd14d929348bfeb795af6cf6cf3415c60772bc701b84cf5e2e959"}}]}], "golden_chunks": [{"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 11, "content": " if (i * 8 < digestLength - 8)\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, 8);\n }\n else\n {\n System.arraycopy(bytes, 0, out, outOffset + i * 8, digestLength - (i * 8));\n }\n }\n\n Arrays.fill(chainValue, 0L);\n\n reset();\n }\n\n", "meta": {"hash_id": "a41730ca45c1a08b287c459887ed58052cba41f3ff4aaa6c8e6ece0d68d47901"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 10, "content": " f0 = 0xFFFFFFFFFFFFFFFFL;\n t0 += bufferPos;\n if (bufferPos > 0 && t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n Arrays.fill(buffer, (byte) 0);// Holds eventually the key if input is null\n Arrays.fill(internalState, 0L);\n\n for (int i = 0; i < chainValue.length && (i * 8 < digestLength); i++)\n {\n byte[] bytes = Utils.longToLittleEndian(chainValue[i]);\n\n", "meta": {"hash_id": "bb76de8510c375643cb6e697a9ae1da10ab32439ba52f226329138d0cdf800fd"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 7, "content": " if (bufferPos != 0)\n {\n remainingLength = BLOCK_LENGTH_BYTES - bufferPos;\n if (remainingLength < len)\n {\n System.arraycopy(message, offset, buffer, bufferPos, remainingLength);\n t0 += BLOCK_LENGTH_BYTES;\n if (t0 == 0)\n {\n t1++;\n }\n compress(buffer, 0);\n bufferPos = 0;\n Arrays.fill(buffer, (byte) 0);// clear buffer\n }\n else\n {\n System.arraycopy(message, offset, buffer, bufferPos, len);\n bufferPos += len;\n return;\n }\n }\n\n", "meta": {"hash_id": 
"414938084cea2f5335cfd48f8b3c7a62b1a710f908003424757efe3a1758dc4a"}}, {"doc_uuid": "cf29d0f1b0d00030546c50e9c15f1fc432ba590aaa270f7c6dcfa2591c386f25", "index": 3, "content": " /**\n * Basic sized constructor - size in bytes.\n *\n * @param digestSize size of the digest in bytes\n */\n Blake2b(int digestSize)\n {\n if (digestSize < 1 || digestSize > 64)\n {\n throw new BadParametersException(\"BLAKE2b digest bytes length must be not greater than 64\");\n }\n\n buffer = new byte[BLOCK_LENGTH_BYTES];\n keyLength = 0;\n this.digestLength = digestSize;\n init();\n }\n\n", "meta": {"hash_id": "487cd37a4741b2d8bb79276c84327a44c03f02ea01ca93cb0f3c078224a1b532"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/Blake2b.java", "repo_name": "Password4j/password4j", "num_chunks": 17, "source": "codebases/jsonl"}} +{"query": "What class does BadParametersException extend?", "answer": "BadParametersException extends the IllegalArgumentException class, as seen in the class declaration: \"public class BadParametersException extends IllegalArgumentException\".", "golden_doc_uuids": ["2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757"], "golden_chunk_uuids": [["2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", 1]], "golden_documents": [{"uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/BadParametersException.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "d165334def646422a508f07c52e22864940865d18c048f0c38f4ec5397914ad8"}}, {"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 1, "content": "/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"hash_id": "cd95746ee84cd45a7f48b07dffd4c582d07bde5c7df359a854ae7e6d3d1dfec3"}}]}], "golden_chunks": [{"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 1, "content": "/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"hash_id": "cd95746ee84cd45a7f48b07dffd4c582d07bde5c7df359a854ae7e6d3d1dfec3"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/BadParametersException.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What package does the BadParametersException class belong to?", "answer": "The BadParametersException class belongs to the \"com.password4j\" package, as specified in the package declaration at the top of the file: \"package com.password4j;\".", "golden_doc_uuids": ["2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757"], "golden_chunk_uuids": [["2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", 1], ["2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", 0]], "golden_documents": [{"uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/BadParametersException.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "d165334def646422a508f07c52e22864940865d18c048f0c38f4ec5397914ad8"}}, {"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 1, "content": "/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"hash_id": "cd95746ee84cd45a7f48b07dffd4c582d07bde5c7df359a854ae7e6d3d1dfec3"}}]}], "golden_chunks": [{"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 1, "content": "/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"hash_id": "cd95746ee84cd45a7f48b07dffd4c582d07bde5c7df359a854ae7e6d3d1dfec3"}}, {"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "d165334def646422a508f07c52e22864940865d18c048f0c38f4ec5397914ad8"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/BadParametersException.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "Who is the author of the BadParametersException class?", "answer": "According to the Javadoc comment, the author of the BadParametersException class is David Bertoldi, as stated in the \"@author David Bertoldi\" tag.", "golden_doc_uuids": ["2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757"], "golden_chunk_uuids": [["2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", 1]], "golden_documents": [{"uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/BadParametersException.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "d165334def646422a508f07c52e22864940865d18c048f0c38f4ec5397914ad8"}}, {"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 1, "content": "/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"hash_id": "cd95746ee84cd45a7f48b07dffd4c582d07bde5c7df359a854ae7e6d3d1dfec3"}}]}], "golden_chunks": [{"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 1, "content": "/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"hash_id": "cd95746ee84cd45a7f48b07dffd4c582d07bde5c7df359a854ae7e6d3d1dfec3"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/BadParametersException.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "Since which version has the BadParametersException class been available?", "answer": "The BadParametersException class has been available since version 0.1.0, as indicated by the \"@since 0.1.0\" tags in the Javadoc comments for the class and its constructors.", "golden_doc_uuids": ["2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757"], "golden_chunk_uuids": [["2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", 1]], "golden_documents": [{"uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/BadParametersException.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "d165334def646422a508f07c52e22864940865d18c048f0c38f4ec5397914ad8"}}, {"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 1, "content": "/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"hash_id": "cd95746ee84cd45a7f48b07dffd4c582d07bde5c7df359a854ae7e6d3d1dfec3"}}]}], "golden_chunks": [{"doc_uuid": "2c96230c24dfd108a09abadeb43abd5f3220c4890e6e1bef947ab5dbb7628757", "index": 1, "content": "/**\n * This exception is normally thrown when a not well formed parameter\n * is passed as argument to a function.\n *
<p>
\n * This exception covers all the exceptions raised by underlying logic,\n * grouping them as one exception.\n *\n * @author David Bertoldi\n * @since 0.1.0\n */\npublic class BadParametersException extends IllegalArgumentException\n{\n\n private static final long serialVersionUID = 9204720180786210237L;\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @since 0.1.0\n */\n public BadParametersException(String message)\n {\n super(message);\n }\n\n /**\n * Constructs the exception.\n *\n * @param message the message describing the cause of the exception\n * @param exception the exception masked by this object\n * @since 0.1.0\n */\n public BadParametersException(String message, Throwable exception)\n {\n super(message, exception);\n }\n}\n", "meta": {"hash_id": "cd95746ee84cd45a7f48b07dffd4c582d07bde5c7df359a854ae7e6d3d1dfec3"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/BadParametersException.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the Hash class?", "answer": "The Hash class represents the result of a cryptographic hashing function applied to a password. It contains the computed hash value, salt, pepper, and the hashing function used. According to the class documentation, \"This class contains all the information computed after calculating a cryptographic hash.\"", "golden_doc_uuids": ["bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b"], "golden_chunk_uuids": [["bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", 1]], "golden_documents": [{"uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\nimport java.util.Objects;\n\n\n/**\n * This class contains all the information computed after\n * calculating a cryptographic hash.\n *
<p>
\n * The same {@link HashingFunction} used to generate the hash\n * is used to verify the plain password; in addition cryptographic\n * seasoning such as salt and pepper are stored in this object.\n *
<p>
\n * A hash is the product of a one-way function that maps data of arbitrary size to\n * fixed-size values; it is called hashing function (HF).\n * This class represent hashes generated by cryptographic hash function (CHF),\n * where each function has the following properties:\n *
<ul>\n *     <li>it is deterministic, meaning that the same message always results in the same hash</li>\n *     <li>it is quick to compute the hash value for any given message</li>\n *     <li>it is infeasible to generate a message that yields a given hash value</li>\n *     <li>it is infeasible to find two different messages with the same hash value</li>\n *     <li>a small change to a message should change the hash value so extensively that the new hash value\n * appears uncorrelated with the old hash value</li>\n * </ul>\n * <p>
\n * A salt is a unique, randomly generated string that is added to each password as part of the hashing process.\n * As the salt is unique for every user, an attacker has to crack hashes one at a time using the respective salt,\n * rather than being able to calculate a hash once and compare it against every stored hash.\n *
<p>
\n * A pepper can be used in additional to salting to provide an additional layer of protection.\n * It is similar to a salt, but has two key differences:\n *
<ul>\n *     <li>The pepper is shared between all stored passwords, rather than being unique like a salt.</li>\n *     <li>The pepper is not stored in the database, unlike the salts.</li>\n * </ul>
\n *\n * @author David Bertoldi\n * @see
OWASP Password Storage Cheat Sheet\n * @see Key derivation function\n * @see Cryptographic hash function\n * @since 0.1.0\n */\npublic class Hash\n{\n\n /**\n * Represents the full output of a cryptographic hashing function.\n * Depending on the implementation of the CHF, it may contain\n * the salt and the configuration of the CHF itself.\n */\n private byte[] result;\n\n /**\n * Represents the computed output of a cryptographic hashing function.\n * It never contains salt and other configurations.\n */\n private byte[] bytes;\n\n /**\n * Represents the salt: random data that is used as an additional input\n * to a cryptographic hashing function.\n */\n private byte[] salt;\n\n /**\n * Represents the pepper: a secret added to the input password\n * prior to being hashed with a cryptographic hash function\n */\n private CharSequence pepper;\n\n /**\n * Represents the hashing function used to generate this object.\n *\n * @see HashingFunction for more details\n */\n private HashingFunction hashingFunction;\n\n /**\n * It is meant to not be used if not internally.\n * The other constructor must be used instead.\n *\n * @see Hash#Hash(HashingFunction, String, byte[], String)\n * @since 0.1.0\n */\n @SuppressWarnings(\"unused\")\n private Hash()\n {\n //\n }\n\n /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n * @deprecated As of 1.8.1 because of the salt conversion from {@link String} to byte[].\n * {@link Hash#Hash(HashingFunction, String, byte[], byte[])} should be used instead.\n */\n @Deprecated\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, String salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, Utils.fromCharSequenceToBytes(salt));\n }\n\n\n\n /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n */\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, byte[] salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, salt);\n }\n\n /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash as bytes array.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation as bytes array.\n * @since 1.7.0\n */\n public Hash(HashingFunction hashingFunction, byte[] result, byte[] bytes, byte[] salt)\n {\n this.hashingFunction = hashingFunction;\n this.salt = salt;\n this.result = result;\n this.bytes = bytes;\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public String getResult()\n {\n return Utils.fromBytesToString(result);\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public byte[] getResultAsBytes()\n {\n return result;\n }\n\n /**\n * Retrieves the hash as byte array and without the parameters\n * encoded in the final hash.\n *\n * @return the hash.\n * @since 1.5.1\n */\n public byte[] getBytes()\n {\n return bytes;\n }\n\n /**\n * Retrieves the {@link HashingFunction} used\n * to hash the password.\n *\n * @return the CHF\n * @since 0.4.0\n */\n public HashingFunction getHashingFunction()\n {\n return hashingFunction;\n }\n\n /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as {@link String}.\n * @since 0.1.0\n */\n public String getSalt()\n {\n return Utils.fromBytesToString(salt);\n }\n\n /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as bytes array.\n * @since 1.7.0\n */\n public byte[] getSaltBytes()\n {\n return salt;\n }\n\n /**\n * Retrieves the pepper used with the password in the hashing function.\n *\n * @return the pepper.\n * @since 0.1.0\n */\n public CharSequence getPepper()\n {\n return pepper;\n }\n\n /**\n * Stores the pepper used together with the password in the hashing function.\n *
<p>
\n * This methods should be used just after the creation of this object.\n *\n * @param pepper the pepper used.\n * @since 0.1.0\n */\n void setPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n }\n\n /**\n * Produces a human-readable description of the {@link Hash}.\n *\n * @return a readable version of this object\n * @since 0.1.0\n */\n @Override\n public String toString()\n {\n StringBuilder sb = new StringBuilder();\n if (this.hashingFunction != null)\n {\n sb.append(hashingFunction.getClass().getSimpleName());\n }\n sb.append(\"[salt=\").append(getSalt()).append(\", pepper=\").append(getPepper()).append(\", hash=\").append(getResult())\n .append(\"]\");\n return sb.toString();\n }\n\n /**\n * Two {@link Hash}es are considered equals if they contain\n * the same hash, salt, pepper and they are generated with\n * the same {@link HashingFunction}\n *\n * @param obj the object to compare\n * @return true if equals\n * @since 0.1.0\n */\n @Override\n public boolean equals(Object obj)\n {\n if (obj == null || !this.getClass().equals(obj.getClass()))\n {\n return false;\n }\n\n Hash otherHash = (Hash) obj;\n return hasSameValues(otherHash);\n }\n\n private boolean hasSameValues(Hash otherHash)\n {\n return Arrays.equals(this.result, otherHash.result) //\n && Arrays.equals(this.bytes, otherHash.bytes) //\n && Arrays.equals(this.salt, otherHash.salt) //\n && areEquals(this.pepper, otherHash.pepper) //\n && this.hashingFunction.equals(otherHash.hashingFunction);\n }\n\n private static boolean areEquals(CharSequence cs1, CharSequence cs2)\n {\n if (cs1 == cs2)\n {\n return true;\n }\n else if (cs1 != null && cs2 != null)\n {\n return cs1.equals(cs2);\n }\n return false;\n }\n\n @Override\n public int hashCode()\n {\n return Objects.hash(Arrays.hashCode(result), Arrays.hashCode(salt), pepper, hashingFunction);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/Hash.java", "repo_name": "Password4j/password4j", "num_chunks": 16, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\nimport java.util.Objects;\n\n", "meta": {"hash_id": "850fbd17a3b07b7d3996d100bcc8b493b86f9256e8bfe147ef2983da1fefb239"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 1, "content": "\n/**\n * This class contains all the information computed after\n * calculating a cryptographic hash.\n *
<p>
\n * The same {@link HashingFunction} used to generate the hash\n * is used to verify the plain password; in addition cryptographic\n * seasoning such as salt and pepper are stored in this object.\n *
<p>
\n * A hash is the product of a one-way function that maps data of arbitrary size to\n * fixed-size values; it is called hashing function (HF).\n * This class represent hashes generated by cryptographic hash function (CHF),\n * where each function has the following properties:\n *
<ul>\n *     <li>it is deterministic, meaning that the same message always results in the same hash</li>\n *     <li>it is quick to compute the hash value for any given message</li>\n *     <li>it is infeasible to generate a message that yields a given hash value</li>\n *     <li>it is infeasible to find two different messages with the same hash value</li>\n", "meta": {"hash_id": "0bbe66f31f8e7e54d25a01d3b643c95119bf277203eefc138ee63e0dccf6c90c"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 2, "content": " *     <li>a small change to a message should change the hash value so extensively that the new hash value\n * appears uncorrelated with the old hash value</li>\n * </ul>\n * <p>
\n * A salt is a unique, randomly generated string that is added to each password as part of the hashing process.\n * As the salt is unique for every user, an attacker has to crack hashes one at a time using the respective salt,\n * rather than being able to calculate a hash once and compare it against every stored hash.\n *
<p>
\n", "meta": {"hash_id": "810a932eb18d944ed7559325a22c2f7443d4249d7f6573ad5ea77e5377a1c3ad"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 3, "content": " * A pepper can be used in additional to salting to provide an additional layer of protection.\n * It is similar to a salt, but has two key differences:\n *
<ul>\n *     <li>The pepper is shared between all stored passwords, rather than being unique like a salt.</li>\n *     <li>The pepper is not stored in the database, unlike the salts.</li>\n * </ul>
\n *\n * @author David Bertoldi\n * @see OWASP Password Storage Cheat Sheet\n * @see Key derivation function\n * @see Cryptographic hash function\n * @since 0.1.0\n */\npublic class Hash\n{\n\n", "meta": {"hash_id": "a07b37dfef227581994367b1c927bf3b0468d4b8b8fb38928930f8f95b028237"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 4, "content": " /**\n * Represents the full output of a cryptographic hashing function.\n * Depending on the implementation of the CHF, it may contain\n * the salt and the configuration of the CHF itself.\n */\n private byte[] result;\n\n /**\n * Represents the computed output of a cryptographic hashing function.\n * It never contains salt and other configurations.\n */\n private byte[] bytes;\n\n /**\n * Represents the salt: random data that is used as an additional input\n * to a cryptographic hashing function.\n */\n private byte[] salt;\n\n", "meta": {"hash_id": "5ce2ee61febe41e9c99ebbe7ee04d64f202479c6362fc702628228b587380f60"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 5, "content": " /**\n * Represents the pepper: a secret added to the input password\n * prior to being hashed with a cryptographic hash function\n */\n private CharSequence pepper;\n\n /**\n * Represents the hashing function used to generate this object.\n *\n * @see HashingFunction for more details\n */\n private HashingFunction hashingFunction;\n\n /**\n * It is meant to not be used if not internally.\n * The other constructor must be used instead.\n *\n * @see Hash#Hash(HashingFunction, String, byte[], String)\n * @since 0.1.0\n */\n @SuppressWarnings(\"unused\")\n private Hash()\n {\n //\n }\n\n", "meta": {"hash_id": "2f115ed43d3006e8f4425b181e168453db7ece56d0060673499eb30d2a4005d4"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 6, "content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n", "meta": {"hash_id": "f9ad64b33b39857cb8796dd13f9a02e8bfe9e5e8587e6b6a106618371ab24281"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 7, "content": " * @deprecated As of 1.8.1 because of the salt conversion from {@link String} to byte[].\n * {@link Hash#Hash(HashingFunction, String, byte[], byte[])} should be used instead.\n */\n @Deprecated\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, String salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, Utils.fromCharSequenceToBytes(salt));\n }\n\n\n\n", "meta": {"hash_id": "d130c159175059c28d5d8ee9f3a9900f57bcc8a19e2099d010d6deaeb6f4a898"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 8, "content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n */\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, byte[] salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, salt);\n }\n\n", "meta": {"hash_id": "e30eab3d29a54cdb3bcc2262ef40e765e9626fb6ed04c94e995998184c06e7e8"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 9, "content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
\n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash as bytes array.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n", "meta": {"hash_id": "eae950a0b84ba3c1cc5419921b6ec10bd32f4b848a876d1b234882dc17899a1b"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 10, "content": " * @param salt the salt used for the computation as bytes array.\n * @since 1.7.0\n */\n public Hash(HashingFunction hashingFunction, byte[] result, byte[] bytes, byte[] salt)\n {\n this.hashingFunction = hashingFunction;\n this.salt = salt;\n this.result = result;\n this.bytes = bytes;\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public String getResult()\n {\n return Utils.fromBytesToString(result);\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public byte[] getResultAsBytes()\n {\n return result;\n }\n\n /**\n * Retrieves the hash as byte array and without the parameters\n * encoded in the final hash.\n *\n * @return the hash.\n * @since 1.5.1\n */\n public byte[] getBytes()\n {\n return bytes;\n }\n\n", "meta": {"hash_id": "53aa574186dc21813ce5d5d1821321eeb132959ed5f855809a1462449f7bf254"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 11, "content": " /**\n * Retrieves the {@link HashingFunction} used\n * to hash the password.\n *\n * @return the CHF\n * @since 0.4.0\n */\n public HashingFunction getHashingFunction()\n {\n return hashingFunction;\n }\n\n /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as {@link String}.\n * @since 0.1.0\n */\n public String getSalt()\n {\n return Utils.fromBytesToString(salt);\n }\n\n", "meta": {"hash_id": "512b81019065df884fb02f34a2bbdd11878ce0711dec02548619a74c775ce354"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 12, "content": " /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as bytes array.\n * @since 1.7.0\n */\n public byte[] getSaltBytes()\n {\n return salt;\n }\n\n /**\n * Retrieves the pepper used with the password in the hashing function.\n *\n * @return the pepper.\n * @since 0.1.0\n */\n public CharSequence getPepper()\n {\n return pepper;\n }\n\n /**\n * Stores the pepper used together with the password in the hashing function.\n *
<p>
\n * This methods should be used just after the creation of this object.\n *\n * @param pepper the pepper used.\n * @since 0.1.0\n */\n void setPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n }\n\n", "meta": {"hash_id": "740e7763e967f12dac926c5655210305b7692e0b99b2f6c33aa2f6bd6c98e250"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 13, "content": " /**\n * Produces a human-readable description of the {@link Hash}.\n *\n * @return a readable version of this object\n * @since 0.1.0\n */\n @Override\n public String toString()\n {\n StringBuilder sb = new StringBuilder();\n if (this.hashingFunction != null)\n {\n sb.append(hashingFunction.getClass().getSimpleName());\n }\n sb.append(\"[salt=\").append(getSalt()).append(\", pepper=\").append(getPepper()).append(\", hash=\").append(getResult())\n .append(\"]\");\n return sb.toString();\n }\n\n", "meta": {"hash_id": "e5fe7c4dafa3576996ac63902bf8c03696aa9e1afe4b0cfe637a5c1def04b1f9"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 14, "content": " /**\n * Two {@link Hash}es are considered equals if they contain\n * the same hash, salt, pepper and they are generated with\n * the same {@link HashingFunction}\n *\n * @param obj the object to compare\n * @return true if equals\n * @since 0.1.0\n */\n @Override\n public boolean equals(Object obj)\n {\n if (obj == null || !this.getClass().equals(obj.getClass()))\n {\n return false;\n }\n\n", "meta": {"hash_id": "b9fa616be09bf9cfe068f043a043ad0b137c6299d0a49abc6277bb597ba1cbb7"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 15, "content": " Hash otherHash = (Hash) obj;\n return hasSameValues(otherHash);\n }\n\n private boolean hasSameValues(Hash otherHash)\n {\n return Arrays.equals(this.result, otherHash.result) //\n && Arrays.equals(this.bytes, otherHash.bytes) //\n && Arrays.equals(this.salt, otherHash.salt) //\n && areEquals(this.pepper, otherHash.pepper) //\n && this.hashingFunction.equals(otherHash.hashingFunction);\n }\n\n private static boolean areEquals(CharSequence cs1, CharSequence cs2)\n {\n if (cs1 == cs2)\n {\n return true;\n }\n else if (cs1 != null && cs2 != null)\n {\n return cs1.equals(cs2);\n }\n return false;\n }\n\n @Override\n public int hashCode()\n {\n return Objects.hash(Arrays.hashCode(result), Arrays.hashCode(salt), pepper, hashingFunction);\n }\n}\n", "meta": {"hash_id": "1f2ed75b309aaa961ef9d2d7799c169ac33e6de5096baef5b9bb8379477f7f7d"}}]}], "golden_chunks": [{"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 1, "content": "\n/**\n * This class contains all the information computed after\n * calculating a cryptographic hash.\n *
<p>
\n * The same {@link HashingFunction} used to generate the hash\n * is used to verify the plain password; in addition cryptographic\n * seasoning such as salt and pepper are stored in this object.\n *
<p>
\n * A hash is the product of a one-way function that maps data of arbitrary size to\n * fixed-size values; it is called hashing function (HF).\n * This class represent hashes generated by cryptographic hash function (CHF),\n * where each function has the following properties:\n *
<ul>\n *     <li>it is deterministic, meaning that the same message always results in the same hash</li>\n *     <li>it is quick to compute the hash value for any given message</li>\n *     <li>it is infeasible to generate a message that yields a given hash value</li>\n *     <li>it is infeasible to find two different messages with the same hash value
  • \n", "meta": {"hash_id": "0bbe66f31f8e7e54d25a01d3b643c95119bf277203eefc138ee63e0dccf6c90c"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/Hash.java", "repo_name": "Password4j/password4j", "num_chunks": 16, "source": "codebases/jsonl"}} +{"query": "What information does the Hash class store?", "answer": "The Hash class stores the following information:\n\n /**\n * Represents the full output of a cryptographic hashing function.\n * Depending on the implementation of the CHF, it may contain\n * the salt and the configuration of the CHF itself.\n */\n private byte[] result;\n\n /**\n * Represents the computed output of a cryptographic hashing function.\n * It never contains salt and other configurations.\n */\n private byte[] bytes;\n\n /**\n * Represents the salt: random data that is used as an additional input\n * to a cryptographic hashing function.\n */\n private byte[] salt;\n\n /**\n * Represents the pepper: a secret added to the input password\n * prior to being hashed with a cryptographic hash function\n */\n private CharSequence pepper;\n\n /**\n * Represents the hashing function used to generate this object.\n *\n * @see HashingFunction for more details\n */\n private HashingFunction hashingFunction;\n\nIt stores the full hash result (which may include salt and other configurations), the hash bytes without additional information, the salt, the pepper, and the hashing function used.", "golden_doc_uuids": ["bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b"], "golden_chunk_uuids": [["bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", 1]], "golden_documents": [{"uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\nimport java.util.Objects;\n\n\n/**\n * This class contains all the information computed after\n * calculating a cryptographic hash.\n *
<p>
    \n * The same {@link HashingFunction} used to generate the hash\n * is used to verify the plain password; in addition cryptographic\n * seasoning such as salt and pepper are stored in this object.\n *
<p>
    \n * A hash is the product of a one-way function that maps data of arbitrary size to\n * fixed-size values; it is called hashing function (HF).\n * This class represent hashes generated by cryptographic hash function (CHF),\n * where each function has the following properties:\n *
<ul>\n *     <li>it is deterministic, meaning that the same message always results in the same hash</li>\n *     <li>it is quick to compute the hash value for any given message</li>\n *     <li>it is infeasible to generate a message that yields a given hash value</li>\n *     <li>it is infeasible to find two different messages with the same hash value</li>\n *     <li>a small change to a message should change the hash value so extensively that the new hash value\n * appears uncorrelated with the old hash value</li>\n * </ul>\n * <p>
    \n * A salt is a unique, randomly generated string that is added to each password as part of the hashing process.\n * As the salt is unique for every user, an attacker has to crack hashes one at a time using the respective salt,\n * rather than being able to calculate a hash once and compare it against every stored hash.\n *
<p>
    \n * A pepper can be used in additional to salting to provide an additional layer of protection.\n * It is similar to a salt, but has two key differences:\n *
<ul>\n *     <li>The pepper is shared between all stored passwords, rather than being unique like a salt.</li>\n *     <li>The pepper is not stored in the database, unlike the salts.</li>\n * </ul>
    \n *\n * @author David Bertoldi\n * @see OWASP Password Storage Cheat Sheet\n * @see Key derivation function\n * @see Cryptographic hash function\n * @since 0.1.0\n */\npublic class Hash\n{\n\n /**\n * Represents the full output of a cryptographic hashing function.\n * Depending on the implementation of the CHF, it may contain\n * the salt and the configuration of the CHF itself.\n */\n private byte[] result;\n\n /**\n * Represents the computed output of a cryptographic hashing function.\n * It never contains salt and other configurations.\n */\n private byte[] bytes;\n\n /**\n * Represents the salt: random data that is used as an additional input\n * to a cryptographic hashing function.\n */\n private byte[] salt;\n\n /**\n * Represents the pepper: a secret added to the input password\n * prior to being hashed with a cryptographic hash function\n */\n private CharSequence pepper;\n\n /**\n * Represents the hashing function used to generate this object.\n *\n * @see HashingFunction for more details\n */\n private HashingFunction hashingFunction;\n\n /**\n * It is meant to not be used if not internally.\n * The other constructor must be used instead.\n *\n * @see Hash#Hash(HashingFunction, String, byte[], String)\n * @since 0.1.0\n */\n @SuppressWarnings(\"unused\")\n private Hash()\n {\n //\n }\n\n /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
    \n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n * @deprecated As of 1.8.1 because of the salt conversion from {@link String} to byte[].\n * {@link Hash#Hash(HashingFunction, String, byte[], byte[])} should be used instead.\n */\n @Deprecated\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, String salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, Utils.fromCharSequenceToBytes(salt));\n }\n\n\n\n /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
    \n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n */\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, byte[] salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, salt);\n }\n\n /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
    \n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash as bytes array.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation as bytes array.\n * @since 1.7.0\n */\n public Hash(HashingFunction hashingFunction, byte[] result, byte[] bytes, byte[] salt)\n {\n this.hashingFunction = hashingFunction;\n this.salt = salt;\n this.result = result;\n this.bytes = bytes;\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public String getResult()\n {\n return Utils.fromBytesToString(result);\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public byte[] getResultAsBytes()\n {\n return result;\n }\n\n /**\n * Retrieves the hash as byte array and without the parameters\n * encoded in the final hash.\n *\n * @return the hash.\n * @since 1.5.1\n */\n public byte[] getBytes()\n {\n return bytes;\n }\n\n /**\n * Retrieves the {@link HashingFunction} used\n * to hash the password.\n *\n * @return the CHF\n * @since 0.4.0\n */\n public HashingFunction getHashingFunction()\n {\n return hashingFunction;\n }\n\n /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as {@link String}.\n * @since 0.1.0\n */\n public String getSalt()\n {\n return Utils.fromBytesToString(salt);\n }\n\n /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as bytes array.\n * @since 1.7.0\n */\n public byte[] getSaltBytes()\n {\n return salt;\n }\n\n /**\n * Retrieves the pepper used with the password in the hashing function.\n *\n * @return the pepper.\n * @since 0.1.0\n */\n public CharSequence getPepper()\n {\n return pepper;\n }\n\n /**\n * Stores the pepper used together with the password in the hashing function.\n *
<p>
    \n * This methods should be used just after the creation of this object.\n *\n * @param pepper the pepper used.\n * @since 0.1.0\n */\n void setPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n }\n\n /**\n * Produces a human-readable description of the {@link Hash}.\n *\n * @return a readable version of this object\n * @since 0.1.0\n */\n @Override\n public String toString()\n {\n StringBuilder sb = new StringBuilder();\n if (this.hashingFunction != null)\n {\n sb.append(hashingFunction.getClass().getSimpleName());\n }\n sb.append(\"[salt=\").append(getSalt()).append(\", pepper=\").append(getPepper()).append(\", hash=\").append(getResult())\n .append(\"]\");\n return sb.toString();\n }\n\n /**\n * Two {@link Hash}es are considered equals if they contain\n * the same hash, salt, pepper and they are generated with\n * the same {@link HashingFunction}\n *\n * @param obj the object to compare\n * @return true if equals\n * @since 0.1.0\n */\n @Override\n public boolean equals(Object obj)\n {\n if (obj == null || !this.getClass().equals(obj.getClass()))\n {\n return false;\n }\n\n Hash otherHash = (Hash) obj;\n return hasSameValues(otherHash);\n }\n\n private boolean hasSameValues(Hash otherHash)\n {\n return Arrays.equals(this.result, otherHash.result) //\n && Arrays.equals(this.bytes, otherHash.bytes) //\n && Arrays.equals(this.salt, otherHash.salt) //\n && areEquals(this.pepper, otherHash.pepper) //\n && this.hashingFunction.equals(otherHash.hashingFunction);\n }\n\n private static boolean areEquals(CharSequence cs1, CharSequence cs2)\n {\n if (cs1 == cs2)\n {\n return true;\n }\n else if (cs1 != null && cs2 != null)\n {\n return cs1.equals(cs2);\n }\n return false;\n }\n\n @Override\n public int hashCode()\n {\n return Objects.hash(Arrays.hashCode(result), Arrays.hashCode(salt), pepper, hashingFunction);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/Hash.java", "repo_name": "Password4j/password4j", "num_chunks": 16, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport java.util.Arrays;\nimport java.util.Objects;\n\n", "meta": {"hash_id": "850fbd17a3b07b7d3996d100bcc8b493b86f9256e8bfe147ef2983da1fefb239"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 1, "content": "\n/**\n * This class contains all the information computed after\n * calculating a cryptographic hash.\n *
<p>
    \n * The same {@link HashingFunction} used to generate the hash\n * is used to verify the plain password; in addition cryptographic\n * seasoning such as salt and pepper are stored in this object.\n *
<p>
    \n * A hash is the product of a one-way function that maps data of arbitrary size to\n * fixed-size values; it is called hashing function (HF).\n * This class represent hashes generated by cryptographic hash function (CHF),\n * where each function has the following properties:\n *
<ul>\n *     <li>it is deterministic, meaning that the same message always results in the same hash</li>\n *     <li>it is quick to compute the hash value for any given message</li>\n *     <li>it is infeasible to generate a message that yields a given hash value</li>\n *     <li>it is infeasible to find two different messages with the same hash value</li>\n", "meta": {"hash_id": "0bbe66f31f8e7e54d25a01d3b643c95119bf277203eefc138ee63e0dccf6c90c"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 2, "content": " *     <li>a small change to a message should change the hash value so extensively that the new hash value\n * appears uncorrelated with the old hash value</li>\n * </ul>\n * <p>
    \n * A salt is a unique, randomly generated string that is added to each password as part of the hashing process.\n * As the salt is unique for every user, an attacker has to crack hashes one at a time using the respective salt,\n * rather than being able to calculate a hash once and compare it against every stored hash.\n *
<p>
    \n", "meta": {"hash_id": "810a932eb18d944ed7559325a22c2f7443d4249d7f6573ad5ea77e5377a1c3ad"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 3, "content": " * A pepper can be used in additional to salting to provide an additional layer of protection.\n * It is similar to a salt, but has two key differences:\n *
<ul>\n *     <li>The pepper is shared between all stored passwords, rather than being unique like a salt.</li>\n *     <li>The pepper is not stored in the database, unlike the salts.</li>\n * </ul>
    \n *\n * @author David Bertoldi\n * @see OWASP Password Storage Cheat Sheet\n * @see Key derivation function\n * @see Cryptographic hash function\n * @since 0.1.0\n */\npublic class Hash\n{\n\n", "meta": {"hash_id": "a07b37dfef227581994367b1c927bf3b0468d4b8b8fb38928930f8f95b028237"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 4, "content": " /**\n * Represents the full output of a cryptographic hashing function.\n * Depending on the implementation of the CHF, it may contain\n * the salt and the configuration of the CHF itself.\n */\n private byte[] result;\n\n /**\n * Represents the computed output of a cryptographic hashing function.\n * It never contains salt and other configurations.\n */\n private byte[] bytes;\n\n /**\n * Represents the salt: random data that is used as an additional input\n * to a cryptographic hashing function.\n */\n private byte[] salt;\n\n", "meta": {"hash_id": "5ce2ee61febe41e9c99ebbe7ee04d64f202479c6362fc702628228b587380f60"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 5, "content": " /**\n * Represents the pepper: a secret added to the input password\n * prior to being hashed with a cryptographic hash function\n */\n private CharSequence pepper;\n\n /**\n * Represents the hashing function used to generate this object.\n *\n * @see HashingFunction for more details\n */\n private HashingFunction hashingFunction;\n\n /**\n * It is meant to not be used if not internally.\n * The other constructor must be used instead.\n *\n * @see Hash#Hash(HashingFunction, String, byte[], String)\n * @since 0.1.0\n */\n @SuppressWarnings(\"unused\")\n private Hash()\n {\n //\n }\n\n", "meta": {"hash_id": "2f115ed43d3006e8f4425b181e168453db7ece56d0060673499eb30d2a4005d4"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 6, "content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
    \n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n", "meta": {"hash_id": "f9ad64b33b39857cb8796dd13f9a02e8bfe9e5e8587e6b6a106618371ab24281"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 7, "content": " * @deprecated As of 1.8.1 because of the salt conversion from {@link String} to byte[].\n * {@link Hash#Hash(HashingFunction, String, byte[], byte[])} should be used instead.\n */\n @Deprecated\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, String salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, Utils.fromCharSequenceToBytes(salt));\n }\n\n\n\n", "meta": {"hash_id": "d130c159175059c28d5d8ee9f3a9900f57bcc8a19e2099d010d6deaeb6f4a898"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 8, "content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
    \n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n * @param salt the salt used for the computation.\n * @since 0.1.0\n */\n public Hash(HashingFunction hashingFunction, String result, byte[] bytes, byte[] salt)\n {\n this(hashingFunction, Utils.fromCharSequenceToBytes(result), bytes, salt);\n }\n\n", "meta": {"hash_id": "e30eab3d29a54cdb3bcc2262ef40e765e9626fb6ed04c94e995998184c06e7e8"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 9, "content": " /**\n * Constructs an {@link Hash} containing the basic information\n * used and produced by the computational process of hashing a password.\n * Other information, like pepper can be added with\n * {@link #setPepper(CharSequence)}.\n *
<p>
    \n * This constructor populates the object's attributes.\n *\n * @param hashingFunction the cryptographic algorithm used to produce the hash.\n * @param result the result of the computation of the hash as bytes array.\n * Notice that the format varies depending on the algorithm.\n * @param bytes the hash without additional information.\n", "meta": {"hash_id": "eae950a0b84ba3c1cc5419921b6ec10bd32f4b848a876d1b234882dc17899a1b"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 10, "content": " * @param salt the salt used for the computation as bytes array.\n * @since 1.7.0\n */\n public Hash(HashingFunction hashingFunction, byte[] result, byte[] bytes, byte[] salt)\n {\n this.hashingFunction = hashingFunction;\n this.salt = salt;\n this.result = result;\n this.bytes = bytes;\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public String getResult()\n {\n return Utils.fromBytesToString(result);\n }\n\n /**\n * Retrieves the hash computed by the hashing function.\n *\n * @return the hash.\n * @since 0.1.0\n */\n public byte[] getResultAsBytes()\n {\n return result;\n }\n\n /**\n * Retrieves the hash as byte array and without the parameters\n * encoded in the final hash.\n *\n * @return the hash.\n * @since 1.5.1\n */\n public byte[] getBytes()\n {\n return bytes;\n }\n\n", "meta": {"hash_id": "53aa574186dc21813ce5d5d1821321eeb132959ed5f855809a1462449f7bf254"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 11, "content": " /**\n * Retrieves the {@link HashingFunction} used\n * to hash the password.\n *\n * @return the CHF\n * @since 0.4.0\n */\n public HashingFunction getHashingFunction()\n {\n return hashingFunction;\n }\n\n /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as {@link String}.\n * @since 0.1.0\n */\n public String getSalt()\n {\n return Utils.fromBytesToString(salt);\n }\n\n", "meta": {"hash_id": "512b81019065df884fb02f34a2bbdd11878ce0711dec02548619a74c775ce354"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 12, "content": " /**\n * Retrieves the salt used by the hashing function.\n *\n * @return the salt as bytes array.\n * @since 1.7.0\n */\n public byte[] getSaltBytes()\n {\n return salt;\n }\n\n /**\n * Retrieves the pepper used with the password in the hashing function.\n *\n * @return the pepper.\n * @since 0.1.0\n */\n public CharSequence getPepper()\n {\n return pepper;\n }\n\n /**\n * Stores the pepper used together with the password in the hashing function.\n *
<p>
    \n * This methods should be used just after the creation of this object.\n *\n * @param pepper the pepper used.\n * @since 0.1.0\n */\n void setPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n }\n\n", "meta": {"hash_id": "740e7763e967f12dac926c5655210305b7692e0b99b2f6c33aa2f6bd6c98e250"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 13, "content": " /**\n * Produces a human-readable description of the {@link Hash}.\n *\n * @return a readable version of this object\n * @since 0.1.0\n */\n @Override\n public String toString()\n {\n StringBuilder sb = new StringBuilder();\n if (this.hashingFunction != null)\n {\n sb.append(hashingFunction.getClass().getSimpleName());\n }\n sb.append(\"[salt=\").append(getSalt()).append(\", pepper=\").append(getPepper()).append(\", hash=\").append(getResult())\n .append(\"]\");\n return sb.toString();\n }\n\n", "meta": {"hash_id": "e5fe7c4dafa3576996ac63902bf8c03696aa9e1afe4b0cfe637a5c1def04b1f9"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 14, "content": " /**\n * Two {@link Hash}es are considered equals if they contain\n * the same hash, salt, pepper and they are generated with\n * the same {@link HashingFunction}\n *\n * @param obj the object to compare\n * @return true if equals\n * @since 0.1.0\n */\n @Override\n public boolean equals(Object obj)\n {\n if (obj == null || !this.getClass().equals(obj.getClass()))\n {\n return false;\n }\n\n", "meta": {"hash_id": "b9fa616be09bf9cfe068f043a043ad0b137c6299d0a49abc6277bb597ba1cbb7"}}, {"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 15, "content": " Hash otherHash = (Hash) obj;\n return hasSameValues(otherHash);\n }\n\n private boolean hasSameValues(Hash otherHash)\n {\n return Arrays.equals(this.result, otherHash.result) //\n && Arrays.equals(this.bytes, otherHash.bytes) //\n && Arrays.equals(this.salt, otherHash.salt) //\n && areEquals(this.pepper, otherHash.pepper) //\n && this.hashingFunction.equals(otherHash.hashingFunction);\n }\n\n private static boolean areEquals(CharSequence cs1, CharSequence cs2)\n {\n if (cs1 == cs2)\n {\n return true;\n }\n else if (cs1 != null && cs2 != null)\n {\n return cs1.equals(cs2);\n }\n return false;\n }\n\n @Override\n public int hashCode()\n {\n return Objects.hash(Arrays.hashCode(result), Arrays.hashCode(salt), pepper, hashingFunction);\n }\n}\n", "meta": {"hash_id": "1f2ed75b309aaa961ef9d2d7799c169ac33e6de5096baef5b9bb8379477f7f7d"}}]}], "golden_chunks": [{"doc_uuid": "bf58cf0c65c709224da0f68ad6bd4fed3df1afcf9677f840b923e95af7377a0b", "index": 1, "content": "\n/**\n * This class contains all the information computed after\n * calculating a cryptographic hash.\n *
<p>
    \n * The same {@link HashingFunction} used to generate the hash\n * is used to verify the plain password; in addition cryptographic\n * seasoning such as salt and pepper are stored in this object.\n *
<p>
    \n * A hash is the product of a one-way function that maps data of arbitrary size to\n * fixed-size values; it is called hashing function (HF).\n * This class represent hashes generated by cryptographic hash function (CHF),\n * where each function has the following properties:\n *
<ul>\n *     <li>it is deterministic, meaning that the same message always results in the same hash</li>\n *     <li>it is quick to compute the hash value for any given message</li>\n *     <li>it is infeasible to generate a message that yields a given hash value</li>\n *     <li>it is infeasible to find two different messages with the same hash value
    • \n", "meta": {"hash_id": "0bbe66f31f8e7e54d25a01d3b643c95119bf277203eefc138ee63e0dccf6c90c"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/Hash.java", "repo_name": "Password4j/password4j", "num_chunks": 16, "source": "codebases/jsonl"}} +{"query": "How do I create a HashBuilder instance?", "answer": "To create a HashBuilder instance, you need to pass either a CharSequence or a byte array representing the plain text password to the constructor. For example:\n\nHashBuilder hashBuilder = new HashBuilder(\"myPassword\");\n// or\nbyte[] passwordBytes = \"myPassword\".getBytes();\nHashBuilder hashBuilder = new HashBuilder(passwordBytes);\n", "golden_doc_uuids": ["4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe"], "golden_chunk_uuids": [["4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", 6], ["4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", 3], ["4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", 1], ["4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", 0]], "golden_documents": [{"uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n/**\n * Builder class that helps to create a chain of parameters to be used\n * in the hashing process.\n *\n * @author David Bertoldi\n * @since 1.0.0\n */\npublic class HashBuilder\n{\n private byte[] plainTextPassword;\n\n protected byte[] salt;\n\n protected CharSequence pepper;\n\n @SuppressWarnings(\"unused\")\n private HashBuilder()\n {\n //\n }\n\n /**\n * @param plainTextPassword the plain text password\n * @since 1.0.0\n */\n protected HashBuilder(CharSequence plainTextPassword)\n {\n this.plainTextPassword = Utils.fromCharSequenceToBytes(plainTextPassword);\n }\n\n /**\n * @param plainTextPasswordAsBytes the plain text password as bytes array\n * @since 1.7.0\n */\n protected HashBuilder(byte[] plainTextPasswordAsBytes)\n {\n this.plainTextPassword = plainTextPasswordAsBytes;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param salt cryptographic salt\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addSalt(String salt)\n {\n this.salt = Utils.fromCharSequenceToBytes(salt);\n return this;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param saltAsBytes cryptographic salt as bytes array\n * @return this builder\n * @since 1.7.0\n */\n public HashBuilder addSalt(byte[] saltAsBytes)\n {\n this.salt = saltAsBytes;\n return this;\n }\n\n /**\n * Add a random cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *
<p>
      \n * Calling this method can be omitted for all the CHFs that require a salt.\n *\n * @return this builder\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt()\n {\n this.salt = SaltGenerator.generate();\n return this;\n }\n\n /**\n * Add a random cryptographic salt in the hashing process with a given length.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param length the length of the salt produced\n * @return this builder\n * @throws BadParametersException if the length is non-positive\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt(int length)\n {\n if (length <= 0)\n {\n throw new BadParametersException(\"Salt cannot have a non-positive length\");\n }\n else\n {\n this.salt = SaltGenerator.generate(length);\n }\n return this;\n }\n\n /**\n * Concatenates the pepper configured in your `psw4j.properties` file with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @return this builder\n * @see PepperGenerator#get()\n */\n public HashBuilder addPepper()\n {\n this.pepper = PepperGenerator.get();\n return this;\n }\n\n /**\n * Concatenates the provided string with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @param pepper cryptographic pepper\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n return this;\n }\n\n /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *
<p>
      \n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link PBKDF2Function}.\n *
<p>
      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withPBKDF2()\n {\n return with(AlgorithmFinder.getPBKDF2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link CompressedPBKDF2Function}.\n *
<p>
      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getCompressedPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withCompressedPBKDF2()\n {\n return with(AlgorithmFinder.getCompressedPBKDF2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link BcryptFunction}.\n *
<p>
      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getBcryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withBcrypt()\n {\n return with(AlgorithmFinder.getBcryptInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link ScryptFunction}.\n *
<p>
      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *
<p>
      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getScryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withScrypt()\n {\n return with(AlgorithmFinder.getScryptInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link MessageDigestFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.4.0\n */\n public Hash withMessageDigest()\n {\n return with(AlgorithmFinder.getMessageDigestInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link Argon2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.5.0\n */\n public Hash withArgon2()\n {\n return with(AlgorithmFinder.getArgon2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link BalloonHashingFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.8.0\n */\n public Hash withBalloonHashing()\n {\n return with(AlgorithmFinder.getBalloonHashingInstance());\n }\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/HashBuilder.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "1aa4da0bd9c6402cc69a393102d544c79bfdd7d349296bce16f7cc92005f05f3"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 1, "content": "/**\n * Builder class that helps to create a chain of parameters to be used\n * in the hashing process.\n *\n * @author David Bertoldi\n * @since 1.0.0\n */\npublic class HashBuilder\n{\n private byte[] plainTextPassword;\n\n protected byte[] salt;\n\n protected CharSequence pepper;\n\n @SuppressWarnings(\"unused\")\n private HashBuilder()\n {\n //\n }\n\n /**\n * @param plainTextPassword the plain text password\n * @since 1.0.0\n */\n protected HashBuilder(CharSequence plainTextPassword)\n {\n this.plainTextPassword = Utils.fromCharSequenceToBytes(plainTextPassword);\n }\n\n /**\n * @param plainTextPasswordAsBytes the plain text password as bytes array\n * @since 1.7.0\n */\n protected HashBuilder(byte[] plainTextPasswordAsBytes)\n {\n this.plainTextPassword = plainTextPasswordAsBytes;\n }\n\n", "meta": {"hash_id": "4f5e188f0fcd5732aff7d64e12d49886467b1914746a06b3899043830dfd04d7"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 2, "content": " /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param salt cryptographic salt\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addSalt(String salt)\n {\n this.salt = Utils.fromCharSequenceToBytes(salt);\n return this;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param saltAsBytes cryptographic salt as bytes array\n * @return this builder\n * @since 1.7.0\n */\n public HashBuilder addSalt(byte[] saltAsBytes)\n {\n this.salt = saltAsBytes;\n return this;\n }\n\n", "meta": {"hash_id": "65d7b46527e9fea807c2236d2799530ff07a245dc0b2c0086ddc4f370dd3f841"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 3, "content": " /**\n * Add a random cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *

      \n * Calling this method can be omitted for all the CHFs that require a salt.\n *\n * @return this builder\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt()\n {\n this.salt = SaltGenerator.generate();\n return this;\n }\n\n", "meta": {"hash_id": "29f093e63ae05a0c5409e9c6b2ea776a3308084e1c46fd6f09181fb5c0485fb1"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 4, "content": " /**\n * Add a random cryptographic salt in the hashing process with a given length.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param length the length of the salt produced\n * @return this builder\n * @throws BadParametersException if the length is non-positive\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt(int length)\n {\n if (length <= 0)\n {\n throw new BadParametersException(\"Salt cannot have a non-positive length\");\n }\n else\n {\n this.salt = SaltGenerator.generate(length);\n }\n return this;\n }\n\n", "meta": {"hash_id": "1b347991d30c42faec0970674b30147b7c6082c8784ec46027efc00c565eb765"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 5, "content": " /**\n * Concatenates the pepper configured in your `psw4j.properties` file with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @return this builder\n * @see PepperGenerator#get()\n */\n public HashBuilder addPepper()\n {\n this.pepper = PepperGenerator.get();\n return this;\n }\n\n /**\n * Concatenates the provided string with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @param pepper cryptographic pepper\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n return this;\n }\n\n", "meta": {"hash_id": "2792507747d112eb812e8983409404df42e1487a0df6d87df054387a50085276"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 6, "content": " /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *

      \n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n", "meta": {"hash_id": "4a36a32e3cd22773dd646aebde0f1b48f9cf1016952721ffb0a2053d5af37ab2"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 7, "content": " /**\n * Hashes the previously given plain text password\n * with {@link PBKDF2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withPBKDF2()\n {\n return with(AlgorithmFinder.getPBKDF2Instance());\n }\n\n", "meta": {"hash_id": "857fc352bd18a483ce7fe3f0af097b22a3522030da047d599b13bfd460bfefbb"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 8, "content": " /**\n * Hashes the previously given plain text password\n * with {@link CompressedPBKDF2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getCompressedPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withCompressedPBKDF2()\n {\n return with(AlgorithmFinder.getCompressedPBKDF2Instance());\n }\n\n", "meta": {"hash_id": "f2f11590b7fb71317bd071c6038865b1cb37a843e8bf26654ee66455dee68669"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 9, "content": " /**\n * Hashes the previously given plain text password\n * with {@link BcryptFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getBcryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withBcrypt()\n {\n return with(AlgorithmFinder.getBcryptInstance());\n }\n\n", "meta": {"hash_id": "eb3af425e141859c63b208284cfbf00e943baa03aa0657c337c40f7c0976598a"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 10, "content": " /**\n * Hashes the previously given plain text password\n * with {@link ScryptFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getScryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withScrypt()\n {\n return with(AlgorithmFinder.getScryptInstance());\n }\n\n", "meta": {"hash_id": "a5c8a4f94db347974e6f593741c44c2cd0457a31ef5ef4cf04107e2a4e621f4e"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 11, "content": " /**\n * Hashes the previously given plain text password\n * with {@link MessageDigestFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.4.0\n */\n public Hash withMessageDigest()\n {\n return with(AlgorithmFinder.getMessageDigestInstance());\n }\n\n", "meta": {"hash_id": "1030ac5a9e3d2ce2c9e30c3c5141dea8c75cfcd400ea74fb396c1c35cce702e9"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 12, "content": " /**\n * Hashes the previously given plain text password\n * with {@link Argon2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.5.0\n */\n public Hash withArgon2()\n {\n return with(AlgorithmFinder.getArgon2Instance());\n }\n\n", "meta": {"hash_id": "5fa10d624c9916dcac0887397e5f9cfb7ed85621958d828a271dc3fbd64b5d17"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 13, "content": " /**\n * Hashes the previously given plain text password\n * with {@link BalloonHashingFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.8.0\n */\n public Hash withBalloonHashing()\n {\n return with(AlgorithmFinder.getBalloonHashingInstance());\n }\n\n}\n", "meta": {"hash_id": "759ebe25667ca32f3f73643b4bfdebb59cbbc42a0a315928b254247419d2921b"}}]}], "golden_chunks": [{"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 6, "content": " /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *

      \n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n", "meta": {"hash_id": "4a36a32e3cd22773dd646aebde0f1b48f9cf1016952721ffb0a2053d5af37ab2"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 3, "content": " /**\n * Add a random cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *

      \n * Calling this method can be omitted for all the CHFs that require a salt.\n *\n * @return this builder\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt()\n {\n this.salt = SaltGenerator.generate();\n return this;\n }\n\n", "meta": {"hash_id": "29f093e63ae05a0c5409e9c6b2ea776a3308084e1c46fd6f09181fb5c0485fb1"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 1, "content": "/**\n * Builder class that helps to create a chain of parameters to be used\n * in the hashing process.\n *\n * @author David Bertoldi\n * @since 1.0.0\n */\npublic class HashBuilder\n{\n private byte[] plainTextPassword;\n\n protected byte[] salt;\n\n protected CharSequence pepper;\n\n @SuppressWarnings(\"unused\")\n private HashBuilder()\n {\n //\n }\n\n /**\n * @param plainTextPassword the plain text password\n * @since 1.0.0\n */\n protected HashBuilder(CharSequence plainTextPassword)\n {\n this.plainTextPassword = Utils.fromCharSequenceToBytes(plainTextPassword);\n }\n\n /**\n * @param plainTextPasswordAsBytes the plain text password as bytes array\n * @since 1.7.0\n */\n protected HashBuilder(byte[] plainTextPasswordAsBytes)\n {\n this.plainTextPassword = plainTextPasswordAsBytes;\n }\n\n", "meta": {"hash_id": "4f5e188f0fcd5732aff7d64e12d49886467b1914746a06b3899043830dfd04d7"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "1aa4da0bd9c6402cc69a393102d544c79bfdd7d349296bce16f7cc92005f05f3"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/HashBuilder.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}} +{"query": "How do I specify the hashing algorithm to use with the HashBuilder?", "answer": "To specify the hashing algorithm, you can use one of the following methods:\n\nHash with(HashingFunction hashingFunction)\nHash withPBKDF2()\nHash withCompressedPBKDF2()\nHash withBcrypt()\nHash withScrypt()\nHash withMessageDigest()\nHash withArgon2()\nHash withBalloonHashing()\n\nThe with(HashingFunction hashingFunction) method allows you to provide a custom implementation of the HashingFunction interface, while the other methods use predefined hashing algorithms with configurations from the `psw4j.properties` file.", "golden_doc_uuids": ["4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe"], "golden_chunk_uuids": [["4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", 6], ["4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", 5]], "golden_documents": [{"uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the 
Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n/**\n * Builder class that helps to create a chain of parameters to be used\n * in the hashing process.\n *\n * @author David Bertoldi\n * @since 1.0.0\n */\npublic class HashBuilder\n{\n private byte[] plainTextPassword;\n\n protected byte[] salt;\n\n protected CharSequence pepper;\n\n @SuppressWarnings(\"unused\")\n private HashBuilder()\n {\n //\n }\n\n /**\n * @param plainTextPassword the plain text password\n * @since 1.0.0\n */\n protected HashBuilder(CharSequence plainTextPassword)\n {\n this.plainTextPassword = Utils.fromCharSequenceToBytes(plainTextPassword);\n }\n\n /**\n * @param plainTextPasswordAsBytes the plain text password as bytes array\n * @since 1.7.0\n */\n protected HashBuilder(byte[] plainTextPasswordAsBytes)\n {\n this.plainTextPassword = plainTextPasswordAsBytes;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param salt cryptographic salt\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addSalt(String salt)\n {\n this.salt = Utils.fromCharSequenceToBytes(salt);\n return this;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param saltAsBytes cryptographic salt as bytes array\n * @return this builder\n * @since 1.7.0\n */\n public HashBuilder addSalt(byte[] saltAsBytes)\n {\n this.salt = saltAsBytes;\n return this;\n }\n\n /**\n * Add a random cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *

      \n * Calling this method can be omitted for all the CHFs that require a salt.\n *\n * @return this builder\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt()\n {\n this.salt = SaltGenerator.generate();\n return this;\n }\n\n /**\n * Add a random cryptographic salt in the hashing process with a given length.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param length the length of the salt produced\n * @return this builder\n * @throws BadParametersException if the length is non-positive\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt(int length)\n {\n if (length <= 0)\n {\n throw new BadParametersException(\"Salt cannot have a non-positive length\");\n }\n else\n {\n this.salt = SaltGenerator.generate(length);\n }\n return this;\n }\n\n /**\n * Concatenates the pepper configured in your `psw4j.properties` file with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @return this builder\n * @see PepperGenerator#get()\n */\n public HashBuilder addPepper()\n {\n this.pepper = PepperGenerator.get();\n return this;\n }\n\n /**\n * Concatenates the provided string with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @param pepper cryptographic pepper\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n return this;\n }\n\n /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *

      \n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link PBKDF2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withPBKDF2()\n {\n return with(AlgorithmFinder.getPBKDF2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link CompressedPBKDF2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getCompressedPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withCompressedPBKDF2()\n {\n return with(AlgorithmFinder.getCompressedPBKDF2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link BcryptFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getBcryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withBcrypt()\n {\n return with(AlgorithmFinder.getBcryptInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link ScryptFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getScryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withScrypt()\n {\n return with(AlgorithmFinder.getScryptInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link MessageDigestFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.4.0\n */\n public Hash withMessageDigest()\n {\n return with(AlgorithmFinder.getMessageDigestInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link Argon2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.5.0\n */\n public Hash withArgon2()\n {\n return with(AlgorithmFinder.getArgon2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link BalloonHashingFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.8.0\n */\n public Hash withBalloonHashing()\n {\n return with(AlgorithmFinder.getBalloonHashingInstance());\n }\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/HashBuilder.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "1aa4da0bd9c6402cc69a393102d544c79bfdd7d349296bce16f7cc92005f05f3"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 1, "content": "/**\n * Builder class that helps to create a chain of parameters to be used\n * in the hashing process.\n *\n * @author David Bertoldi\n * @since 1.0.0\n */\npublic class HashBuilder\n{\n private byte[] plainTextPassword;\n\n protected byte[] salt;\n\n protected CharSequence pepper;\n\n @SuppressWarnings(\"unused\")\n private HashBuilder()\n {\n //\n }\n\n /**\n * @param plainTextPassword the plain text password\n * @since 1.0.0\n */\n protected HashBuilder(CharSequence plainTextPassword)\n {\n this.plainTextPassword = Utils.fromCharSequenceToBytes(plainTextPassword);\n }\n\n /**\n * @param plainTextPasswordAsBytes the plain text password as bytes array\n * @since 1.7.0\n */\n protected HashBuilder(byte[] plainTextPasswordAsBytes)\n {\n this.plainTextPassword = plainTextPasswordAsBytes;\n }\n\n", "meta": {"hash_id": "4f5e188f0fcd5732aff7d64e12d49886467b1914746a06b3899043830dfd04d7"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 2, "content": " /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param salt cryptographic salt\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addSalt(String salt)\n {\n this.salt = Utils.fromCharSequenceToBytes(salt);\n return this;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param saltAsBytes cryptographic salt as bytes array\n * @return this builder\n * @since 1.7.0\n */\n public HashBuilder addSalt(byte[] saltAsBytes)\n {\n this.salt = saltAsBytes;\n return this;\n }\n\n", "meta": {"hash_id": "65d7b46527e9fea807c2236d2799530ff07a245dc0b2c0086ddc4f370dd3f841"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 3, "content": " /**\n * Add a random cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *

      \n * Calling this method can be omitted for all the CHFs that require a salt.\n *\n * @return this builder\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt()\n {\n this.salt = SaltGenerator.generate();\n return this;\n }\n\n", "meta": {"hash_id": "29f093e63ae05a0c5409e9c6b2ea776a3308084e1c46fd6f09181fb5c0485fb1"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 4, "content": " /**\n * Add a random cryptographic salt in the hashing process with a given length.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param length the length of the salt produced\n * @return this builder\n * @throws BadParametersException if the length is non-positive\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt(int length)\n {\n if (length <= 0)\n {\n throw new BadParametersException(\"Salt cannot have a non-positive length\");\n }\n else\n {\n this.salt = SaltGenerator.generate(length);\n }\n return this;\n }\n\n", "meta": {"hash_id": "1b347991d30c42faec0970674b30147b7c6082c8784ec46027efc00c565eb765"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 5, "content": " /**\n * Concatenates the pepper configured in your `psw4j.properties` file with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @return this builder\n * @see PepperGenerator#get()\n */\n public HashBuilder addPepper()\n {\n this.pepper = PepperGenerator.get();\n return this;\n }\n\n /**\n * Concatenates the provided string with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @param pepper cryptographic pepper\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n return this;\n }\n\n", "meta": {"hash_id": "2792507747d112eb812e8983409404df42e1487a0df6d87df054387a50085276"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 6, "content": " /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *

      \n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n", "meta": {"hash_id": "4a36a32e3cd22773dd646aebde0f1b48f9cf1016952721ffb0a2053d5af37ab2"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 7, "content": " /**\n * Hashes the previously given plain text password\n * with {@link PBKDF2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withPBKDF2()\n {\n return with(AlgorithmFinder.getPBKDF2Instance());\n }\n\n", "meta": {"hash_id": "857fc352bd18a483ce7fe3f0af097b22a3522030da047d599b13bfd460bfefbb"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 8, "content": " /**\n * Hashes the previously given plain text password\n * with {@link CompressedPBKDF2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getCompressedPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withCompressedPBKDF2()\n {\n return with(AlgorithmFinder.getCompressedPBKDF2Instance());\n }\n\n", "meta": {"hash_id": "f2f11590b7fb71317bd071c6038865b1cb37a843e8bf26654ee66455dee68669"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 9, "content": " /**\n * Hashes the previously given plain text password\n * with {@link BcryptFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getBcryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withBcrypt()\n {\n return with(AlgorithmFinder.getBcryptInstance());\n }\n\n", "meta": {"hash_id": "eb3af425e141859c63b208284cfbf00e943baa03aa0657c337c40f7c0976598a"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 10, "content": " /**\n * Hashes the previously given plain text password\n * with {@link ScryptFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getScryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withScrypt()\n {\n return with(AlgorithmFinder.getScryptInstance());\n }\n\n", "meta": {"hash_id": "a5c8a4f94db347974e6f593741c44c2cd0457a31ef5ef4cf04107e2a4e621f4e"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 11, "content": " /**\n * Hashes the previously given plain text password\n * with {@link MessageDigestFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.4.0\n */\n public Hash withMessageDigest()\n {\n return with(AlgorithmFinder.getMessageDigestInstance());\n }\n\n", "meta": {"hash_id": "1030ac5a9e3d2ce2c9e30c3c5141dea8c75cfcd400ea74fb396c1c35cce702e9"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 12, "content": " /**\n * Hashes the previously given plain text password\n * with {@link Argon2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.5.0\n */\n public Hash withArgon2()\n {\n return with(AlgorithmFinder.getArgon2Instance());\n }\n\n", "meta": {"hash_id": "5fa10d624c9916dcac0887397e5f9cfb7ed85621958d828a271dc3fbd64b5d17"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 13, "content": " /**\n * Hashes the previously given plain text password\n * with {@link BalloonHashingFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.8.0\n */\n public Hash withBalloonHashing()\n {\n return with(AlgorithmFinder.getBalloonHashingInstance());\n }\n\n}\n", "meta": {"hash_id": "759ebe25667ca32f3f73643b4bfdebb59cbbc42a0a315928b254247419d2921b"}}]}], "golden_chunks": [{"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 6, "content": " /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *

      \n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n", "meta": {"hash_id": "4a36a32e3cd22773dd646aebde0f1b48f9cf1016952721ffb0a2053d5af37ab2"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 5, "content": " /**\n * Concatenates the pepper configured in your `psw4j.properties` file with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @return this builder\n * @see PepperGenerator#get()\n */\n public HashBuilder addPepper()\n {\n this.pepper = PepperGenerator.get();\n return this;\n }\n\n /**\n * Concatenates the provided string with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @param pepper cryptographic pepper\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n return this;\n }\n\n", "meta": {"hash_id": "2792507747d112eb812e8983409404df42e1487a0df6d87df054387a50085276"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/HashBuilder.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}} +{"query": "What is the difference between withPBKDF2() and withCompressedPBKDF2()?", "answer": "Both withPBKDF2() and withCompressedPBKDF2() use the PBKDF2 hashing algorithm, but withCompressedPBKDF2() uses a compressed version of PBKDF2. The compressed version is obtained using the AlgorithmFinder.getCompressedPBKDF2Instance() method, while the regular PBKDF2 is obtained using AlgorithmFinder.getPBKDF2Instance().", "golden_doc_uuids": ["4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe"], "golden_chunk_uuids": [["4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", 8], ["4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", 7]], "golden_documents": [{"uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n/**\n * Builder class that helps to create a chain of parameters to be used\n * in the hashing process.\n *\n * @author David Bertoldi\n * @since 1.0.0\n */\npublic class HashBuilder\n{\n private byte[] plainTextPassword;\n\n protected byte[] salt;\n\n protected CharSequence pepper;\n\n @SuppressWarnings(\"unused\")\n private HashBuilder()\n {\n //\n }\n\n /**\n * @param plainTextPassword the plain text password\n * @since 1.0.0\n */\n protected HashBuilder(CharSequence plainTextPassword)\n {\n this.plainTextPassword = Utils.fromCharSequenceToBytes(plainTextPassword);\n }\n\n 
/**\n * @param plainTextPasswordAsBytes the plain text password as bytes array\n * @since 1.7.0\n */\n protected HashBuilder(byte[] plainTextPasswordAsBytes)\n {\n this.plainTextPassword = plainTextPasswordAsBytes;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param salt cryptographic salt\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addSalt(String salt)\n {\n this.salt = Utils.fromCharSequenceToBytes(salt);\n return this;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param saltAsBytes cryptographic salt as bytes array\n * @return this builder\n * @since 1.7.0\n */\n public HashBuilder addSalt(byte[] saltAsBytes)\n {\n this.salt = saltAsBytes;\n return this;\n }\n\n /**\n * Add a random cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *

      \n * Calling this method can be omitted for all the CHFs that require a salt.\n *\n * @return this builder\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt()\n {\n this.salt = SaltGenerator.generate();\n return this;\n }\n\n /**\n * Add a random cryptographic salt in the hashing process with a given length.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param length the length of the salt produced\n * @return this builder\n * @throws BadParametersException if the length is non-positive\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt(int length)\n {\n if (length <= 0)\n {\n throw new BadParametersException(\"Salt cannot have a non-positive length\");\n }\n else\n {\n this.salt = SaltGenerator.generate(length);\n }\n return this;\n }\n\n /**\n * Concatenates the pepper configured in your `psw4j.properties` file with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @return this builder\n * @see PepperGenerator#get()\n */\n public HashBuilder addPepper()\n {\n this.pepper = PepperGenerator.get();\n return this;\n }\n\n /**\n * Concatenates the provided string with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @param pepper cryptographic pepper\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n return this;\n }\n\n /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *

      \n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link PBKDF2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withPBKDF2()\n {\n return with(AlgorithmFinder.getPBKDF2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link CompressedPBKDF2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getCompressedPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withCompressedPBKDF2()\n {\n return with(AlgorithmFinder.getCompressedPBKDF2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link BcryptFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getBcryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withBcrypt()\n {\n return with(AlgorithmFinder.getBcryptInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link ScryptFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getScryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withScrypt()\n {\n return with(AlgorithmFinder.getScryptInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link MessageDigestFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.4.0\n */\n public Hash withMessageDigest()\n {\n return with(AlgorithmFinder.getMessageDigestInstance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link Argon2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.5.0\n */\n public Hash withArgon2()\n {\n return with(AlgorithmFinder.getArgon2Instance());\n }\n\n /**\n * Hashes the previously given plain text password\n * with {@link BalloonHashingFunction}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

      \n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.8.0\n */\n public Hash withBalloonHashing()\n {\n return with(AlgorithmFinder.getBalloonHashingInstance());\n }\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/HashBuilder.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "1aa4da0bd9c6402cc69a393102d544c79bfdd7d349296bce16f7cc92005f05f3"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 1, "content": "/**\n * Builder class that helps to create a chain of parameters to be used\n * in the hashing process.\n *\n * @author David Bertoldi\n * @since 1.0.0\n */\npublic class HashBuilder\n{\n private byte[] plainTextPassword;\n\n protected byte[] salt;\n\n protected CharSequence pepper;\n\n @SuppressWarnings(\"unused\")\n private HashBuilder()\n {\n //\n }\n\n /**\n * @param plainTextPassword the plain text password\n * @since 1.0.0\n */\n protected HashBuilder(CharSequence plainTextPassword)\n {\n this.plainTextPassword = Utils.fromCharSequenceToBytes(plainTextPassword);\n }\n\n /**\n * @param plainTextPasswordAsBytes the plain text password as bytes array\n * @since 1.7.0\n */\n protected HashBuilder(byte[] plainTextPasswordAsBytes)\n {\n this.plainTextPassword = plainTextPasswordAsBytes;\n }\n\n", "meta": {"hash_id": "4f5e188f0fcd5732aff7d64e12d49886467b1914746a06b3899043830dfd04d7"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 2, "content": " /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param salt cryptographic salt\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addSalt(String salt)\n {\n this.salt = Utils.fromCharSequenceToBytes(salt);\n return this;\n }\n\n /**\n * Add a cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param saltAsBytes cryptographic salt as bytes array\n * @return this builder\n * @since 1.7.0\n */\n public HashBuilder addSalt(byte[] saltAsBytes)\n {\n this.salt = saltAsBytes;\n return this;\n }\n\n", "meta": {"hash_id": "65d7b46527e9fea807c2236d2799530ff07a245dc0b2c0086ddc4f370dd3f841"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 3, "content": " /**\n * Add a random cryptographic salt in the hashing process.\n * The salt is applied differently depending on the chosen algorithm.\n *

      \n * Calling this method can be omitted for all the CHFs that require a salt.\n *\n * @return this builder\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt()\n {\n this.salt = SaltGenerator.generate();\n return this;\n }\n\n", "meta": {"hash_id": "29f093e63ae05a0c5409e9c6b2ea776a3308084e1c46fd6f09181fb5c0485fb1"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 4, "content": " /**\n * Add a random cryptographic salt in the hashing process with a given length.\n * The salt is applied differently depending on the chosen algorithm.\n *\n * @param length the length of the salt produced\n * @return this builder\n * @throws BadParametersException if the length is non-positive\n * @see SaltGenerator#generate() for more information about the length of the product\n * @since 1.0.0\n */\n public HashBuilder addRandomSalt(int length)\n {\n if (length <= 0)\n {\n throw new BadParametersException(\"Salt cannot have a non-positive length\");\n }\n else\n {\n this.salt = SaltGenerator.generate(length);\n }\n return this;\n }\n\n", "meta": {"hash_id": "1b347991d30c42faec0970674b30147b7c6082c8784ec46027efc00c565eb765"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 5, "content": " /**\n * Concatenates the pepper configured in your `psw4j.properties` file with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @return this builder\n * @see PepperGenerator#get()\n */\n public HashBuilder addPepper()\n {\n this.pepper = PepperGenerator.get();\n return this;\n }\n\n /**\n * Concatenates the provided string with the plain text password.\n * The produced sequence (in the form {@code pepper+password}) is processed by the algorithm.\n *\n * @param pepper cryptographic pepper\n * @return this builder\n * @since 1.0.0\n */\n public HashBuilder addPepper(CharSequence pepper)\n {\n this.pepper = pepper;\n return this;\n }\n\n", "meta": {"hash_id": "2792507747d112eb812e8983409404df42e1487a0df6d87df054387a50085276"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 6, "content": " /**\n * Hashes the previously given plain text password\n * with a specific implementation of {@link HashingFunction}.\n *

      \n * This method does not read the configurations in the `psw4j.properties` file.\n *\n * @param hashingFunction a CHF\n * @return a {@link Hash} object\n * @since 1.0.0\n */\n public Hash with(HashingFunction hashingFunction)\n {\n return hashingFunction.hash(plainTextPassword, salt, pepper);\n }\n\n", "meta": {"hash_id": "4a36a32e3cd22773dd646aebde0f1b48f9cf1016952721ffb0a2053d5af37ab2"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 7, "content": " /**\n * Hashes the previously given plain text password\n * with {@link PBKDF2Function}.\n *

      \n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *

\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withPBKDF2()\n {\n return with(AlgorithmFinder.getPBKDF2Instance());\n }\n\n", "meta": {"hash_id": "857fc352bd18a483ce7fe3f0af097b22a3522030da047d599b13bfd460bfefbb"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 8, "content": " /**\n * Hashes the previously given plain text password\n * with {@link CompressedPBKDF2Function}.\n *\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getCompressedPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withCompressedPBKDF2()\n {\n return with(AlgorithmFinder.getCompressedPBKDF2Instance());\n }\n\n", "meta": {"hash_id": "f2f11590b7fb71317bd071c6038865b1cb37a843e8bf26654ee66455dee68669"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 9, "content": " /**\n * Hashes the previously given plain text password\n * with {@link BcryptFunction}.\n *\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getBcryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withBcrypt()\n {\n return with(AlgorithmFinder.getBcryptInstance());\n }\n\n", "meta": {"hash_id": "eb3af425e141859c63b208284cfbf00e943baa03aa0657c337c40f7c0976598a"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 10, "content": " /**\n * Hashes the previously given plain text password\n * with {@link ScryptFunction}.\n *\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getScryptInstance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withScrypt()\n {\n return with(AlgorithmFinder.getScryptInstance());\n }\n\n", "meta": {"hash_id": "a5c8a4f94db347974e6f593741c44c2cd0457a31ef5ef4cf04107e2a4e621f4e"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 11, "content": " /**\n * Hashes the previously given plain text password\n * with {@link MessageDigestFunction}.\n *\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.4.0\n */\n public Hash withMessageDigest()\n {\n return with(AlgorithmFinder.getMessageDigestInstance());\n }\n\n", "meta": {"hash_id": "1030ac5a9e3d2ce2c9e30c3c5141dea8c75cfcd400ea74fb396c1c35cce702e9"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 12, "content": " /**\n * Hashes the previously given plain text password\n * with {@link Argon2Function}.\n *\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.5.0\n */\n public Hash withArgon2()\n {\n return with(AlgorithmFinder.getArgon2Instance());\n }\n\n", "meta": {"hash_id": "5fa10d624c9916dcac0887397e5f9cfb7ed85621958d828a271dc3fbd64b5d17"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 13, "content": " /**\n * Hashes the previously given plain text password\n * with {@link BalloonHashingFunction}.\n *\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getArgon2Instance()\n * @see #with(HashingFunction)\n * @since 1.8.0\n */\n public Hash withBalloonHashing()\n {\n return with(AlgorithmFinder.getBalloonHashingInstance());\n }\n\n}\n", "meta": {"hash_id": "759ebe25667ca32f3f73643b4bfdebb59cbbc42a0a315928b254247419d2921b"}}]}], "golden_chunks": [{"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 8, "content": " /**\n * Hashes the previously given plain text password\n * with {@link CompressedPBKDF2Function}.\n *\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return an {@link Hash} object\n * @see AlgorithmFinder#getCompressedPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withCompressedPBKDF2()\n {\n return with(AlgorithmFinder.getCompressedPBKDF2Instance());\n }\n\n", "meta": {"hash_id": "f2f11590b7fb71317bd071c6038865b1cb37a843e8bf26654ee66455dee68669"}}, {"doc_uuid": "4455bcd2fc69f3f80a0322dd01f54d8b880a366e519955adca455ce4f51efbbe", "index": 7, "content": " /**\n * Hashes the previously given plain text password\n * with {@link PBKDF2Function}.\n *\n * This method reads the configurations in the `psw4j.properties` file. If no configuration is found,\n * then the default parameters are used.\n *\n * Finally calls {@link #with(HashingFunction)}\n *\n * @return a {@link Hash} object\n * @see AlgorithmFinder#getPBKDF2Instance()\n * @see #with(HashingFunction)\n * @since 1.0.0\n */\n public Hash withPBKDF2()\n {\n return with(AlgorithmFinder.getPBKDF2Instance());\n }\n\n", "meta": {"hash_id": "857fc352bd18a483ce7fe3f0af097b22a3522030da047d599b13bfd460bfefbb"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/HashBuilder.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}}
+{"query": "How do you create an instance of MessageDigestFunction with a specific hashing algorithm and salt option?", "answer": "To create an instance of MessageDigestFunction with a specific hashing algorithm and salt option, you can use the static factory method `MessageDigestFunction.getInstance(String algorithm, SaltOption saltOption)`. For example:\n```java\nHashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n```\nThis creates an instance of MessageDigestFunction that uses the MD5 hashing algorithm and prepends the salt to the password before hashing.", "golden_doc_uuids": ["d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2"], "golden_chunk_uuids": [["d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", 2]], "golden_documents": [{"uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n @Test\n public void testMDVariants()\n {\n Set algorithms = AlgorithmFinder.getAllMessageDigests();\n for (String alg : algorithms)\n {\n // GIVEN\n MessageDigestFunction strategy = MessageDigestFunction.getInstance(alg);\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n Hash hashWithSalt = strategy.hash(password, salt);\n\n // THEN\n Assert.assertTrue(strategy.check(password, hash.getResult()));\n Assert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n }\n }\n\n @Test(expected = UnsupportedOperationException.class)\n public void testMDWrongAlgorithm()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"notAnAlgorithm\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n strategy.hash(password, salt);\n\n // THEN\n }\n\n @Test\n public void testMDWrongSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"1234\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n }\n\n @Test\n public void testMDRightSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"prepend\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.PREPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n\n }\n\n\n @Test\n public void testPBKDF2Check()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2WrongCheck2()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String badHash = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n try {\n Assert.assertTrue(strategy.check(userSubmittedPassword, badHash));\n } catch (BadParametersException ex) {\n assertEquals(\"`\" + badHash + \"` is not a valid hash\", ex.getMessage());\n }\n }\n\n\n @Test(expected = BadParametersException.class)\n public void testPBKDF2BadCheck()\n {\n // GIVEN\n String hashed = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n\n }\n\n @Test\n public void testAlgorithmFromCode()\n {\n // GIVEN\n\n // WHEN\n Hmac algNull = Hmac.fromCode(-100);\n for (Hmac enumAlg : Hmac.values())\n {\n Hmac alg = Hmac.fromCode(enumAlg.code());\n\n\n // THEN\n Assert.assertNotNull(alg);\n assertEquals(enumAlg.code(), alg.code());\n assertEquals(enumAlg.bits(), alg.bits());\n }\n Assert.assertNull(algNull);\n\n\n }\n\n @Test\n public void testPBKDF2Coherence()\n {\n // GIVEN\n String password = \"password\";\n\n // WHEN\n Hash hash = PBKDF2Function.getInstance(Hmac.SHA256, 8_777, 256).hash(password);\n\n // THEN\n Assert.assertTrue(Password.check(password, hash));\n\n }\n\n @Test\n public void testPBKDF2CheckWithFixedConfigurations()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = new CompressedPBKDF2Function(Hmac.SHA256, 10_000, 256);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2equality()\n {\n // GIVEN\n PBKDF2Function strategy1 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy2 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy3 = PBKDF2Function.getInstance(Hmac.SHA1, 10_000, 256);\n PBKDF2Function strategy4 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 256);\n PBKDF2Function strategy5 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 123);\n\n\n // WHEN\n Map map = new HashMap<>();\n map.put(strategy1, strategy1.toString());\n map.put(strategy2, strategy2.toString());\n map.put(strategy3, strategy3.toString());\n map.put(strategy4, strategy4.toString());\n map.put(strategy5, strategy5.toString());\n\n\n // THEN\n assertEquals(4, map.size());\n assertEquals(strategy1, strategy2);\n }\n\n @Test\n public void testCompressed()\n {\n Hmac algorithm = Hmac.SHA512;\n\n\n for (int i = 1; i <= 100; i++)\n {\n String password = PepperGenerator.generate(12);\n String salt = PepperGenerator.generate(i);\n Hash hash = CompressedPBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n Hash notCompressedHash = PBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n // THEN\n boolean eqNull = function.equals(null);\n boolean eqClass = function.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = function.equals(MessageDigestFunction.getInstance(a, o));\n boolean sameInst2 = function.equals(new MessageDigestFunction(a, o));\n String toString = function.toString();\n int hashCode = function.hashCode();\n boolean notSameInst1 = function.equals(new MessageDigestFunction(\"SHA1\", o));\n boolean notSameInst2 = function.equals(new MessageDigestFunction(a, SaltOption.PREPEND));\n\n\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new MessageDigestFunction(\"SHA1\", o).toString());\n Assert.assertNotEquals(hashCode, new MessageDigestFunction(a, SaltOption.PREPEND).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n }\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/MessageDigestFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "e3013c74fe9760ac9258ae43077b2bc15b486ec0191b022df116fc1ab87417aa"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 1, "content": "import com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n", "meta": {"hash_id": "7eef44c4f4000300a7a3c55418a3318cae9f823df2738ca7a744b222d242a2c6"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 2, "content": "\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n", "meta": {"hash_id": "8ad2f67f3c576c20e0034d23b6b74987f29605e477bb0e1b4aa17d03c163338b"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 3, "content": " @Test\n public void testMDVariants()\n {\n Set algorithms = AlgorithmFinder.getAllMessageDigests();\n for (String alg : algorithms)\n {\n // GIVEN\n MessageDigestFunction strategy = MessageDigestFunction.getInstance(alg);\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n Hash hashWithSalt = strategy.hash(password, salt);\n\n // THEN\n Assert.assertTrue(strategy.check(password, hash.getResult()));\n Assert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n }\n }\n\n @Test(expected = UnsupportedOperationException.class)\n public void testMDWrongAlgorithm()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"notAnAlgorithm\");\n String password = \"password\";\n String salt = \"abc\";\n\n", "meta": {"hash_id": "4a396b54f33402f1d76bc505d8d33112eb03edf258cc4ffcb162205d3b53fac8"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 4, "content": " // WHEN\n strategy.hash(password, salt);\n\n // THEN\n }\n\n @Test\n public void testMDWrongSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"1234\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n }\n\n", "meta": {"hash_id": "66e0337a1a52103c5bec5830baf4cfb7b359b6a80f9d2e53f23e9f4f7983ba8b"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 5, "content": " @Test\n public void testMDRightSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"prepend\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.PREPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n\n }\n\n\n @Test\n public void testPBKDF2Check()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n", "meta": {"hash_id": "21489de22b400088056586d9e34e92d54d22171ad4c80fa6869024526a644f9f"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 6, "content": " // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2WrongCheck2()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String badHash = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n try {\n Assert.assertTrue(strategy.check(userSubmittedPassword, badHash));\n } catch (BadParametersException ex) {\n assertEquals(\"`\" + badHash + \"` is not a valid hash\", ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "8806410eb886ae0f6de0c58d617ee89dfbfa1096c5f93ded2cac0903130618f6"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 7, "content": "\n @Test(expected = BadParametersException.class)\n public void testPBKDF2BadCheck()\n {\n // GIVEN\n String hashed = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n\n }\n\n @Test\n public void testAlgorithmFromCode()\n {\n // GIVEN\n\n // WHEN\n Hmac algNull = Hmac.fromCode(-100);\n for (Hmac enumAlg : Hmac.values())\n {\n Hmac alg = Hmac.fromCode(enumAlg.code());\n\n", "meta": {"hash_id": "a6d2bf3b45f4b25bdd84fb3dbaafa96806ea7a707947b1bf044236a79bb80a33"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 8, "content": "\n // THEN\n Assert.assertNotNull(alg);\n assertEquals(enumAlg.code(), alg.code());\n assertEquals(enumAlg.bits(), alg.bits());\n }\n Assert.assertNull(algNull);\n\n\n }\n\n @Test\n public void testPBKDF2Coherence()\n {\n // GIVEN\n String password = \"password\";\n\n // WHEN\n Hash hash = PBKDF2Function.getInstance(Hmac.SHA256, 8_777, 256).hash(password);\n\n // THEN\n Assert.assertTrue(Password.check(password, hash));\n\n }\n\n", "meta": {"hash_id": "34e3ec8905d597811367a25d9b934335b8f56ba75c5e3a44bc821938aa09f35d"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 9, "content": " @Test\n public void testPBKDF2CheckWithFixedConfigurations()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = new CompressedPBKDF2Function(Hmac.SHA256, 10_000, 256);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2equality()\n {\n // GIVEN\n PBKDF2Function strategy1 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy2 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy3 = PBKDF2Function.getInstance(Hmac.SHA1, 10_000, 256);\n PBKDF2Function strategy4 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 256);\n PBKDF2Function strategy5 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 123);\n\n", "meta": {"hash_id": "df905794c1f30daed1153e42006f5a9c7809a7ac748406b71a4458dc14155e4b"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 10, "content": "\n // WHEN\n Map map = new HashMap<>();\n map.put(strategy1, strategy1.toString());\n map.put(strategy2, strategy2.toString());\n map.put(strategy3, strategy3.toString());\n map.put(strategy4, strategy4.toString());\n map.put(strategy5, strategy5.toString());\n\n\n // THEN\n assertEquals(4, map.size());\n assertEquals(strategy1, strategy2);\n }\n\n @Test\n public void testCompressed()\n {\n Hmac algorithm = Hmac.SHA512;\n\n\n for (int i = 1; i <= 100; i++)\n {\n String password = PepperGenerator.generate(12);\n String salt = PepperGenerator.generate(i);\n Hash hash = CompressedPBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n Hash notCompressedHash = PBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n", "meta": {"hash_id": "d96ed18499e3bf66c5348102c0144e49ef36f007dcdb41a80f3c4f7aec3f099f"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 11, "content": " String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n", "meta": {"hash_id": "8bc439bb2011be543babc3850d466b82964763bbf4066f5b72efc9cd8f484dc7"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 12, "content": " // THEN\n boolean eqNull = function.equals(null);\n boolean eqClass = function.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = function.equals(MessageDigestFunction.getInstance(a, o));\n boolean sameInst2 = function.equals(new MessageDigestFunction(a, o));\n String toString = function.toString();\n int hashCode = function.hashCode();\n boolean notSameInst1 = function.equals(new MessageDigestFunction(\"SHA1\", o));\n boolean notSameInst2 = function.equals(new MessageDigestFunction(a, SaltOption.PREPEND));\n\n", "meta": {"hash_id": "2f11bed1b4653d920d7dd7098593efd5a0742074f033ae4ea0da5e98950c2594"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 13, "content": "\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new MessageDigestFunction(\"SHA1\", o).toString());\n Assert.assertNotEquals(hashCode, new MessageDigestFunction(a, SaltOption.PREPEND).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n }\n\n}\n", "meta": {"hash_id": "4d6211a486aa405634af503eebdd02ba1ca76522e1f52374eaf06112b1351b02"}}]}], "golden_chunks": [{"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 2, "content": "\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n", "meta": {"hash_id": "8ad2f67f3c576c20e0034d23b6b74987f29605e477bb0e1b4aa17d03c163338b"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/MessageDigestFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}}
+{"query": "How do you hash a password using MessageDigestFunction?", "answer": "To hash a password using MessageDigestFunction, you can call the `hash(String password)` or `hash(String password, String salt)` method on the instance. For example:\n```java\nHashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\nString password = \"password\";\nString salt = \"abc\";\n\nHash hash = strategy.hash(password);\nHash hashWithSalt = strategy.hash(password, salt);\n```\nThe `hash(String password)` method hashes the password without a salt, while the `hash(String password, String salt)` method hashes the password with the provided salt.", "golden_doc_uuids": ["d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2"], "golden_chunk_uuids": [["d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", 1]], "golden_documents": [{"uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n @Test\n public void testMDVariants()\n {\n Set algorithms = AlgorithmFinder.getAllMessageDigests();\n for (String alg : algorithms)\n {\n // GIVEN\n MessageDigestFunction strategy = MessageDigestFunction.getInstance(alg);\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n Hash hashWithSalt = strategy.hash(password, salt);\n\n // THEN\n Assert.assertTrue(strategy.check(password, hash.getResult()));\n Assert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n }\n }\n\n @Test(expected = UnsupportedOperationException.class)\n public void testMDWrongAlgorithm()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"notAnAlgorithm\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n strategy.hash(password, salt);\n\n // THEN\n }\n\n @Test\n public void testMDWrongSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"1234\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n }\n\n @Test\n public void testMDRightSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"prepend\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.PREPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n\n }\n\n\n @Test\n public void testPBKDF2Check()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2WrongCheck2()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String badHash = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n try {\n Assert.assertTrue(strategy.check(userSubmittedPassword, badHash));\n } catch (BadParametersException ex) {\n assertEquals(\"`\" + badHash + \"` is not a valid hash\", ex.getMessage());\n }\n }\n\n\n @Test(expected = BadParametersException.class)\n public void testPBKDF2BadCheck()\n {\n // GIVEN\n String hashed = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n\n }\n\n @Test\n public void testAlgorithmFromCode()\n {\n // GIVEN\n\n // WHEN\n Hmac algNull = Hmac.fromCode(-100);\n for (Hmac enumAlg : Hmac.values())\n {\n Hmac alg = Hmac.fromCode(enumAlg.code());\n\n\n // THEN\n Assert.assertNotNull(alg);\n assertEquals(enumAlg.code(), alg.code());\n assertEquals(enumAlg.bits(), alg.bits());\n }\n Assert.assertNull(algNull);\n\n\n }\n\n @Test\n public void testPBKDF2Coherence()\n {\n // GIVEN\n String password = \"password\";\n\n // WHEN\n Hash hash = PBKDF2Function.getInstance(Hmac.SHA256, 8_777, 256).hash(password);\n\n // THEN\n Assert.assertTrue(Password.check(password, hash));\n\n }\n\n @Test\n public void testPBKDF2CheckWithFixedConfigurations()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = new CompressedPBKDF2Function(Hmac.SHA256, 10_000, 256);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2equality()\n {\n // GIVEN\n PBKDF2Function strategy1 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy2 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy3 = PBKDF2Function.getInstance(Hmac.SHA1, 10_000, 256);\n PBKDF2Function strategy4 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 256);\n PBKDF2Function strategy5 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 123);\n\n\n // WHEN\n Map map = new HashMap<>();\n map.put(strategy1, strategy1.toString());\n map.put(strategy2, strategy2.toString());\n map.put(strategy3, strategy3.toString());\n map.put(strategy4, strategy4.toString());\n map.put(strategy5, strategy5.toString());\n\n\n // THEN\n assertEquals(4, map.size());\n assertEquals(strategy1, strategy2);\n }\n\n @Test\n public void testCompressed()\n {\n Hmac algorithm = Hmac.SHA512;\n\n\n for (int i = 1; i <= 100; i++)\n {\n String password = PepperGenerator.generate(12);\n String salt = PepperGenerator.generate(i);\n Hash hash = CompressedPBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n Hash notCompressedHash = PBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n // THEN\n boolean eqNull = function.equals(null);\n boolean eqClass = function.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = function.equals(MessageDigestFunction.getInstance(a, o));\n boolean sameInst2 = function.equals(new MessageDigestFunction(a, o));\n String toString = function.toString();\n int hashCode = function.hashCode();\n boolean notSameInst1 = function.equals(new MessageDigestFunction(\"SHA1\", o));\n boolean notSameInst2 = function.equals(new MessageDigestFunction(a, SaltOption.PREPEND));\n\n\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new MessageDigestFunction(\"SHA1\", o).toString());\n Assert.assertNotEquals(hashCode, new MessageDigestFunction(a, SaltOption.PREPEND).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n }\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/MessageDigestFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "e3013c74fe9760ac9258ae43077b2bc15b486ec0191b022df116fc1ab87417aa"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 1, "content": "import com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n", "meta": {"hash_id": "7eef44c4f4000300a7a3c55418a3318cae9f823df2738ca7a744b222d242a2c6"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 2, "content": "\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n", "meta": {"hash_id": "8ad2f67f3c576c20e0034d23b6b74987f29605e477bb0e1b4aa17d03c163338b"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 3, "content": " @Test\n public void testMDVariants()\n {\n Set algorithms = AlgorithmFinder.getAllMessageDigests();\n for (String alg : algorithms)\n {\n // GIVEN\n MessageDigestFunction strategy = MessageDigestFunction.getInstance(alg);\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n Hash hashWithSalt = strategy.hash(password, salt);\n\n // THEN\n Assert.assertTrue(strategy.check(password, hash.getResult()));\n Assert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n }\n }\n\n @Test(expected = UnsupportedOperationException.class)\n public void testMDWrongAlgorithm()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"notAnAlgorithm\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n strategy.hash(password, salt);\n\n // THEN\n }\n\n @Test\n public void testMDWrongSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"1234\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n }\n\n", "meta": {"hash_id": "66e0337a1a52103c5bec5830baf4cfb7b359b6a80f9d2e53f23e9f4f7983ba8b"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 5, "content": " @Test\n public void testMDRightSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"prepend\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.PREPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n\n }\n\n\n @Test\n public void testPBKDF2Check()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n", "meta": {"hash_id": "21489de22b400088056586d9e34e92d54d22171ad4c80fa6869024526a644f9f"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 6, "content": " // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2WrongCheck2()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String badHash = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n try {\n Assert.assertTrue(strategy.check(userSubmittedPassword, badHash));\n } catch (BadParametersException ex) {\n assertEquals(\"`\" + badHash + \"` is not a valid hash\", ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "8806410eb886ae0f6de0c58d617ee89dfbfa1096c5f93ded2cac0903130618f6"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 7, "content": "\n @Test(expected = BadParametersException.class)\n public void testPBKDF2BadCheck()\n {\n // GIVEN\n String hashed = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n\n }\n\n @Test\n public void testAlgorithmFromCode()\n {\n // GIVEN\n\n // WHEN\n Hmac algNull = Hmac.fromCode(-100);\n for (Hmac enumAlg : Hmac.values())\n {\n Hmac alg = Hmac.fromCode(enumAlg.code());\n\n", "meta": {"hash_id": "a6d2bf3b45f4b25bdd84fb3dbaafa96806ea7a707947b1bf044236a79bb80a33"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 8, "content": "\n // THEN\n Assert.assertNotNull(alg);\n assertEquals(enumAlg.code(), alg.code());\n assertEquals(enumAlg.bits(), alg.bits());\n }\n Assert.assertNull(algNull);\n\n\n }\n\n @Test\n public void testPBKDF2Coherence()\n {\n // GIVEN\n String password = \"password\";\n\n // WHEN\n Hash hash = PBKDF2Function.getInstance(Hmac.SHA256, 8_777, 256).hash(password);\n\n // THEN\n Assert.assertTrue(Password.check(password, hash));\n\n }\n\n", "meta": {"hash_id": "34e3ec8905d597811367a25d9b934335b8f56ba75c5e3a44bc821938aa09f35d"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 9, "content": " @Test\n public void testPBKDF2CheckWithFixedConfigurations()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = new CompressedPBKDF2Function(Hmac.SHA256, 10_000, 256);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2equality()\n {\n // GIVEN\n PBKDF2Function strategy1 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy2 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy3 = PBKDF2Function.getInstance(Hmac.SHA1, 10_000, 256);\n PBKDF2Function strategy4 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 256);\n PBKDF2Function strategy5 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 123);\n\n", "meta": {"hash_id": "df905794c1f30daed1153e42006f5a9c7809a7ac748406b71a4458dc14155e4b"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 10, "content": "\n // WHEN\n Map map = new HashMap<>();\n map.put(strategy1, strategy1.toString());\n map.put(strategy2, strategy2.toString());\n map.put(strategy3, strategy3.toString());\n map.put(strategy4, strategy4.toString());\n map.put(strategy5, strategy5.toString());\n\n\n // THEN\n assertEquals(4, map.size());\n assertEquals(strategy1, strategy2);\n }\n\n @Test\n public void testCompressed()\n {\n Hmac algorithm = Hmac.SHA512;\n\n\n for (int i = 1; i <= 100; i++)\n {\n String password = PepperGenerator.generate(12);\n String salt = PepperGenerator.generate(i);\n Hash hash = CompressedPBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n Hash notCompressedHash = PBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n", "meta": {"hash_id": "d96ed18499e3bf66c5348102c0144e49ef36f007dcdb41a80f3c4f7aec3f099f"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 11, "content": " String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n", "meta": {"hash_id": "8bc439bb2011be543babc3850d466b82964763bbf4066f5b72efc9cd8f484dc7"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 12, "content": " // THEN\n boolean eqNull = function.equals(null);\n boolean eqClass = function.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = function.equals(MessageDigestFunction.getInstance(a, o));\n boolean sameInst2 = function.equals(new MessageDigestFunction(a, o));\n String toString = function.toString();\n int hashCode = function.hashCode();\n boolean notSameInst1 = function.equals(new MessageDigestFunction(\"SHA1\", o));\n boolean notSameInst2 = function.equals(new MessageDigestFunction(a, SaltOption.PREPEND));\n\n", "meta": {"hash_id": "2f11bed1b4653d920d7dd7098593efd5a0742074f033ae4ea0da5e98950c2594"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 13, "content": "\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new MessageDigestFunction(\"SHA1\", o).toString());\n Assert.assertNotEquals(hashCode, new MessageDigestFunction(a, SaltOption.PREPEND).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n }\n\n}\n", "meta": {"hash_id": "4d6211a486aa405634af503eebdd02ba1ca76522e1f52374eaf06112b1351b02"}}]}], "golden_chunks": [{"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 1, "content": "import com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n", "meta": {"hash_id": "7eef44c4f4000300a7a3c55418a3318cae9f823df2738ca7a744b222d242a2c6"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/MessageDigestFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}}
+{"query": "How do you check if a password matches a hashed value using MessageDigestFunction?", "answer": "To check if a password matches a hashed value using MessageDigestFunction, you can call the `check(String password, String hashedPassword)` or `check(String password, String hashedPassword, String salt)` method on the instance. For example:\n```java\nHashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\nString password = \"password\";\nString salt = \"abc\";\n\nHash hash = strategy.hash(password);\nHash hashWithSalt = strategy.hash(password, salt);\n\nAssert.assertTrue(strategy.check(password, hash.getResult()));\nAssert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n```\nThe `check(String password, String hashedPassword)` method checks if the password matches the hashed password without a salt, while the `check(String password, String hashedPassword, String salt)` method checks if the password matches the hashed password with the provided salt.", "golden_doc_uuids": ["d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2"], "golden_chunk_uuids": [["d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", 1], ["d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", 0]], "golden_documents": [{"uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n @Test\n public void testMDVariants()\n {\n Set algorithms = AlgorithmFinder.getAllMessageDigests();\n for (String alg : algorithms)\n {\n // GIVEN\n MessageDigestFunction strategy = MessageDigestFunction.getInstance(alg);\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n Hash hashWithSalt = strategy.hash(password, salt);\n\n // THEN\n Assert.assertTrue(strategy.check(password, hash.getResult()));\n Assert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n }\n }\n\n @Test(expected = UnsupportedOperationException.class)\n public void testMDWrongAlgorithm()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"notAnAlgorithm\");\n String password = \"password\";\n String salt = \"abc\";\n\n", "meta": {"hash_id": "4a396b54f33402f1d76bc505d8d33112eb03edf258cc4ffcb162205d3b53fac8"}}, 
{"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 4, "content": " // WHEN\n strategy.hash(password, salt);\n\n // THEN\n }\n\n @Test\n public void testMDWrongSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"1234\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n }\n\n", "meta": {"hash_id": "66e0337a1a52103c5bec5830baf4cfb7b359b6a80f9d2e53f23e9f4f7983ba8b"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 5, "content": " @Test\n public void testMDRightSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"prepend\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.PREPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n\n }\n\n\n @Test\n public void testPBKDF2Check()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n", "meta": {"hash_id": "21489de22b400088056586d9e34e92d54d22171ad4c80fa6869024526a644f9f"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 6, "content": " // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2WrongCheck2()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String badHash = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n try {\n Assert.assertTrue(strategy.check(userSubmittedPassword, badHash));\n } catch (BadParametersException ex) {\n assertEquals(\"`\" + badHash + \"` is not a valid hash\", ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "8806410eb886ae0f6de0c58d617ee89dfbfa1096c5f93ded2cac0903130618f6"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 7, "content": "\n @Test(expected = BadParametersException.class)\n public void testPBKDF2BadCheck()\n {\n // GIVEN\n String hashed = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n\n }\n\n @Test\n public void testAlgorithmFromCode()\n {\n // GIVEN\n\n // WHEN\n Hmac algNull = Hmac.fromCode(-100);\n for (Hmac enumAlg : Hmac.values())\n {\n Hmac alg = Hmac.fromCode(enumAlg.code());\n\n", "meta": {"hash_id": "a6d2bf3b45f4b25bdd84fb3dbaafa96806ea7a707947b1bf044236a79bb80a33"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 8, "content": "\n // THEN\n Assert.assertNotNull(alg);\n assertEquals(enumAlg.code(), alg.code());\n assertEquals(enumAlg.bits(), alg.bits());\n }\n Assert.assertNull(algNull);\n\n\n }\n\n @Test\n public void testPBKDF2Coherence()\n {\n // GIVEN\n String password = \"password\";\n\n // WHEN\n Hash hash = 
PBKDF2Function.getInstance(Hmac.SHA256, 8_777, 256).hash(password);\n\n // THEN\n Assert.assertTrue(Password.check(password, hash));\n\n }\n\n", "meta": {"hash_id": "34e3ec8905d597811367a25d9b934335b8f56ba75c5e3a44bc821938aa09f35d"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 9, "content": " @Test\n public void testPBKDF2CheckWithFixedConfigurations()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = new CompressedPBKDF2Function(Hmac.SHA256, 10_000, 256);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2equality()\n {\n // GIVEN\n PBKDF2Function strategy1 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy2 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy3 = PBKDF2Function.getInstance(Hmac.SHA1, 10_000, 256);\n PBKDF2Function strategy4 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 256);\n PBKDF2Function strategy5 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 123);\n\n", "meta": {"hash_id": "df905794c1f30daed1153e42006f5a9c7809a7ac748406b71a4458dc14155e4b"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 10, "content": "\n // WHEN\n Map map = new HashMap<>();\n map.put(strategy1, strategy1.toString());\n map.put(strategy2, strategy2.toString());\n map.put(strategy3, strategy3.toString());\n map.put(strategy4, strategy4.toString());\n map.put(strategy5, strategy5.toString());\n\n\n // THEN\n assertEquals(4, map.size());\n assertEquals(strategy1, strategy2);\n }\n\n @Test\n public void testCompressed()\n {\n Hmac algorithm = Hmac.SHA512;\n\n\n for (int i = 1; i <= 100; i++)\n {\n String password = PepperGenerator.generate(12);\n String salt = PepperGenerator.generate(i);\n Hash hash = CompressedPBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n Hash notCompressedHash = PBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n", "meta": {"hash_id": "d96ed18499e3bf66c5348102c0144e49ef36f007dcdb41a80f3c4f7aec3f099f"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 11, "content": " String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n", "meta": {"hash_id": "8bc439bb2011be543babc3850d466b82964763bbf4066f5b72efc9cd8f484dc7"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 12, "content": " // THEN\n boolean eqNull = function.equals(null);\n 
boolean eqClass = function.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = function.equals(MessageDigestFunction.getInstance(a, o));\n boolean sameInst2 = function.equals(new MessageDigestFunction(a, o));\n String toString = function.toString();\n int hashCode = function.hashCode();\n boolean notSameInst1 = function.equals(new MessageDigestFunction(\"SHA1\", o));\n boolean notSameInst2 = function.equals(new MessageDigestFunction(a, SaltOption.PREPEND));\n\n", "meta": {"hash_id": "2f11bed1b4653d920d7dd7098593efd5a0742074f033ae4ea0da5e98950c2594"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 13, "content": "\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new MessageDigestFunction(\"SHA1\", o).toString());\n Assert.assertNotEquals(hashCode, new MessageDigestFunction(a, SaltOption.PREPEND).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n }\n\n}\n", "meta": {"hash_id": "4d6211a486aa405634af503eebdd02ba1ca76522e1f52374eaf06112b1351b02"}}]}], "golden_chunks": [{"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 1, "content": "import com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n", "meta": {"hash_id": "7eef44c4f4000300a7a3c55418a3318cae9f823df2738ca7a744b222d242a2c6"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "e3013c74fe9760ac9258ae43077b2bc15b486ec0191b022df116fc1ab87417aa"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/MessageDigestFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}} +{"query": "How can you retrieve the hashing algorithm and salt option used by a MessageDigestFunction instance?", "answer": "You can retrieve the hashing algorithm and salt option used by a MessageDigestFunction instance using the `getAlgorithm()` and `getSaltOption()` methods, respectively. 
For example:\n```java\nMessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\nassertEquals(\"MD5\", function.getAlgorithm());\nassertEquals(SaltOption.APPEND, function.getSaltOption());\n```\nThe `getAlgorithm()` method returns the hashing algorithm as a string, and the `getSaltOption()` method returns the salt option as an instance of the `SaltOption` enum.", "golden_doc_uuids": ["d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2"], "golden_chunk_uuids": [["d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", 11]], "golden_documents": [{"uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\nimport com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n @Test\n public void testMDVariants()\n {\n Set algorithms = AlgorithmFinder.getAllMessageDigests();\n for (String alg : algorithms)\n {\n // GIVEN\n MessageDigestFunction strategy = MessageDigestFunction.getInstance(alg);\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n Hash hashWithSalt = strategy.hash(password, salt);\n\n // THEN\n Assert.assertTrue(strategy.check(password, hash.getResult()));\n Assert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n }\n }\n\n @Test(expected = UnsupportedOperationException.class)\n public void testMDWrongAlgorithm()\n {\n // GIVEN\n HashingFunction strategy = 
MessageDigestFunction.getInstance(\"notAnAlgorithm\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n strategy.hash(password, salt);\n\n // THEN\n }\n\n @Test\n public void testMDWrongSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"1234\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n }\n\n @Test\n public void testMDRightSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"prepend\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.PREPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n\n }\n\n\n @Test\n public void testPBKDF2Check()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2WrongCheck2()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String badHash = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n try {\n Assert.assertTrue(strategy.check(userSubmittedPassword, badHash));\n } catch (BadParametersException ex) {\n assertEquals(\"`\" + badHash + \"` is not a valid hash\", ex.getMessage());\n }\n }\n\n\n @Test(expected = BadParametersException.class)\n public void testPBKDF2BadCheck()\n {\n // GIVEN\n String hashed = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n\n }\n\n @Test\n public void testAlgorithmFromCode()\n {\n // GIVEN\n\n // WHEN\n Hmac algNull = Hmac.fromCode(-100);\n for (Hmac enumAlg : Hmac.values())\n {\n Hmac alg = Hmac.fromCode(enumAlg.code());\n\n\n // THEN\n Assert.assertNotNull(alg);\n assertEquals(enumAlg.code(), alg.code());\n assertEquals(enumAlg.bits(), alg.bits());\n }\n Assert.assertNull(algNull);\n\n\n }\n\n @Test\n public void testPBKDF2Coherence()\n {\n // GIVEN\n String password = \"password\";\n\n // WHEN\n Hash hash = PBKDF2Function.getInstance(Hmac.SHA256, 8_777, 256).hash(password);\n\n // THEN\n Assert.assertTrue(Password.check(password, hash));\n\n }\n\n @Test\n public void testPBKDF2CheckWithFixedConfigurations()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = new CompressedPBKDF2Function(Hmac.SHA256, 10_000, 256);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2equality()\n {\n // GIVEN\n PBKDF2Function strategy1 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy2 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy3 = 
PBKDF2Function.getInstance(Hmac.SHA1, 10_000, 256);\n PBKDF2Function strategy4 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 256);\n PBKDF2Function strategy5 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 123);\n\n\n // WHEN\n Map map = new HashMap<>();\n map.put(strategy1, strategy1.toString());\n map.put(strategy2, strategy2.toString());\n map.put(strategy3, strategy3.toString());\n map.put(strategy4, strategy4.toString());\n map.put(strategy5, strategy5.toString());\n\n\n // THEN\n assertEquals(4, map.size());\n assertEquals(strategy1, strategy2);\n }\n\n @Test\n public void testCompressed()\n {\n Hmac algorithm = Hmac.SHA512;\n\n\n for (int i = 1; i <= 100; i++)\n {\n String password = PepperGenerator.generate(12);\n String salt = PepperGenerator.generate(i);\n Hash hash = CompressedPBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n Hash notCompressedHash = PBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n // THEN\n boolean eqNull = function.equals(null);\n boolean eqClass = function.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = function.equals(MessageDigestFunction.getInstance(a, o));\n boolean sameInst2 = function.equals(new MessageDigestFunction(a, o));\n String toString = function.toString();\n int hashCode = function.hashCode();\n boolean notSameInst1 = function.equals(new MessageDigestFunction(\"SHA1\", o));\n boolean notSameInst2 = function.equals(new MessageDigestFunction(a, SaltOption.PREPEND));\n\n\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new MessageDigestFunction(\"SHA1\", o).toString());\n Assert.assertNotEquals(hashCode, new MessageDigestFunction(a, SaltOption.PREPEND).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n }\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/MessageDigestFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 0, "content": "/*\n * (C) Copyright 2020 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the 
License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j;\n\n", "meta": {"hash_id": "e3013c74fe9760ac9258ae43077b2bc15b486ec0191b022df116fc1ab87417aa"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 1, "content": "import com.password4j.types.Bcrypt;\nimport com.password4j.types.Hmac;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.util.Base64;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\n\n\npublic class MessageDigestFunctionTest\n{\n\n\n @Test\n public void testMD5()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password, salt);\n\n // THEN\n assertEquals(\"8223fe8dc0533c6ebbb717e7fda2833c\", hash.getResult());\n }\n\n", "meta": {"hash_id": "7eef44c4f4000300a7a3c55418a3318cae9f823df2738ca7a744b222d242a2c6"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 2, "content": "\n @Test\n public void testMD5noSalt()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"MD5\");\n String password = \"password\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n\n // THEN\n assertEquals(\"5f4dcc3b5aa765d61d8327deb882cf99\", hash.getResult());\n }\n\n @Test\n public void testDifferentConcatenations()\n {\n // GIVEN\n HashingFunction strategy1 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.PREPEND);\n HashingFunction strategy2 = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash1 = strategy1.hash(password, salt);\n Hash hash2 = strategy2.hash(password, salt);\n\n // THEN\n Assert.assertNotEquals(hash1.getResult(), hash2.getResult());\n }\n\n", "meta": {"hash_id": "8ad2f67f3c576c20e0034d23b6b74987f29605e477bb0e1b4aa17d03c163338b"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 3, "content": " @Test\n public void testMDVariants()\n {\n Set algorithms = AlgorithmFinder.getAllMessageDigests();\n for (String alg : algorithms)\n {\n // GIVEN\n MessageDigestFunction strategy = MessageDigestFunction.getInstance(alg);\n String password = \"password\";\n String salt = \"abc\";\n\n // WHEN\n Hash hash = strategy.hash(password);\n Hash hashWithSalt = strategy.hash(password, salt);\n\n // THEN\n Assert.assertTrue(strategy.check(password, hash.getResult()));\n Assert.assertTrue(strategy.check(password, hashWithSalt.getResult(), salt));\n }\n }\n\n @Test(expected = UnsupportedOperationException.class)\n public void testMDWrongAlgorithm()\n {\n // GIVEN\n HashingFunction strategy = MessageDigestFunction.getInstance(\"notAnAlgorithm\");\n String password = \"password\";\n String salt = \"abc\";\n\n", "meta": {"hash_id": "4a396b54f33402f1d76bc505d8d33112eb03edf258cc4ffcb162205d3b53fac8"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 4, "content": " // WHEN\n strategy.hash(password, salt);\n\n // THEN\n }\n\n @Test\n public void testMDWrongSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"1234\");\n\n // WHEN\n MessageDigestFunction 
function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n }\n\n", "meta": {"hash_id": "66e0337a1a52103c5bec5830baf4cfb7b359b6a80f9d2e53f23e9f4f7983ba8b"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 5, "content": " @Test\n public void testMDRightSaltOption()\n {\n // GIVEN\n\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"prepend\");\n\n // WHEN\n MessageDigestFunction function = AlgorithmFinder.getMessageDigestInstance();\n\n // THEN\n assertEquals(SaltOption.PREPEND, function.getSaltOption());\n PropertyReader.properties.setProperty(\"hash.md.salt.option\", \"append\");\n\n }\n\n\n @Test\n public void testPBKDF2Check()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n", "meta": {"hash_id": "21489de22b400088056586d9e34e92d54d22171ad4c80fa6869024526a644f9f"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 6, "content": " // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2WrongCheck2()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String badHash = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n // THEN\n try {\n Assert.assertTrue(strategy.check(userSubmittedPassword, badHash));\n } catch (BadParametersException ex) {\n assertEquals(\"`\" + badHash + \"` is not a valid hash\", ex.getMessage());\n }\n }\n\n", "meta": {"hash_id": "8806410eb886ae0f6de0c58d617ee89dfbfa1096c5f93ded2cac0903130618f6"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 7, "content": "\n @Test(expected = BadParametersException.class)\n public void testPBKDF2BadCheck()\n {\n // GIVEN\n String hashed = \"$342949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n CompressedPBKDF2Function.getInstanceFromHash(hashed);\n\n\n }\n\n @Test\n public void testAlgorithmFromCode()\n {\n // GIVEN\n\n // WHEN\n Hmac algNull = Hmac.fromCode(-100);\n for (Hmac enumAlg : Hmac.values())\n {\n Hmac alg = Hmac.fromCode(enumAlg.code());\n\n", "meta": {"hash_id": "a6d2bf3b45f4b25bdd84fb3dbaafa96806ea7a707947b1bf044236a79bb80a33"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 8, "content": "\n // THEN\n Assert.assertNotNull(alg);\n assertEquals(enumAlg.code(), alg.code());\n assertEquals(enumAlg.bits(), alg.bits());\n }\n Assert.assertNull(algNull);\n\n\n }\n\n @Test\n public void testPBKDF2Coherence()\n {\n // GIVEN\n String password = \"password\";\n\n // WHEN\n Hash hash = PBKDF2Function.getInstance(Hmac.SHA256, 8_777, 256).hash(password);\n\n // THEN\n Assert.assertTrue(Password.check(password, hash));\n\n }\n\n", "meta": {"hash_id": "34e3ec8905d597811367a25d9b934335b8f56ba75c5e3a44bc821938aa09f35d"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 9, "content": " @Test\n public void 
testPBKDF2CheckWithFixedConfigurations()\n {\n // GIVEN\n String hashed = \"$3$42949672960256$YWJj$/WTQfTTc8Hg8GlplP0LthpgdElUG+I3MyuvK8MI4MnQ=\";\n String userSubmittedPassword = \"password\";\n\n // WHEN\n HashingFunction strategy = new CompressedPBKDF2Function(Hmac.SHA256, 10_000, 256);\n\n // THEN\n Assert.assertTrue(strategy.check(userSubmittedPassword, hashed));\n }\n\n\n @Test\n public void testPBKDF2equality()\n {\n // GIVEN\n PBKDF2Function strategy1 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy2 = PBKDF2Function.getInstance(Hmac.SHA256, 10_000, 256);\n PBKDF2Function strategy3 = PBKDF2Function.getInstance(Hmac.SHA1, 10_000, 256);\n PBKDF2Function strategy4 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 256);\n PBKDF2Function strategy5 = PBKDF2Function.getInstance(Hmac.SHA256, 64_000, 123);\n\n", "meta": {"hash_id": "df905794c1f30daed1153e42006f5a9c7809a7ac748406b71a4458dc14155e4b"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 10, "content": "\n // WHEN\n Map map = new HashMap<>();\n map.put(strategy1, strategy1.toString());\n map.put(strategy2, strategy2.toString());\n map.put(strategy3, strategy3.toString());\n map.put(strategy4, strategy4.toString());\n map.put(strategy5, strategy5.toString());\n\n\n // THEN\n assertEquals(4, map.size());\n assertEquals(strategy1, strategy2);\n }\n\n @Test\n public void testCompressed()\n {\n Hmac algorithm = Hmac.SHA512;\n\n\n for (int i = 1; i <= 100; i++)\n {\n String password = PepperGenerator.generate(12);\n String salt = PepperGenerator.generate(i);\n Hash hash = CompressedPBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n Hash notCompressedHash = PBKDF2Function.getInstance(algorithm, 100 * i, algorithm.bits()).hash(password, salt);\n\n", "meta": {"hash_id": "d96ed18499e3bf66c5348102c0144e49ef36f007dcdb41a80f3c4f7aec3f099f"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 11, "content": " String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n", "meta": {"hash_id": "8bc439bb2011be543babc3850d466b82964763bbf4066f5b72efc9cd8f484dc7"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 12, "content": " // THEN\n boolean eqNull = function.equals(null);\n boolean eqClass = function.equals(new BcryptFunction(Bcrypt.A, 10));\n boolean sameInst = function.equals(MessageDigestFunction.getInstance(a, o));\n boolean sameInst2 = function.equals(new MessageDigestFunction(a, o));\n String toString = function.toString();\n int hashCode = function.hashCode();\n boolean notSameInst1 = function.equals(new 
MessageDigestFunction(\"SHA1\", o));\n boolean notSameInst2 = function.equals(new MessageDigestFunction(a, SaltOption.PREPEND));\n\n", "meta": {"hash_id": "2f11bed1b4653d920d7dd7098593efd5a0742074f033ae4ea0da5e98950c2594"}}, {"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 13, "content": "\n // END\n Assert.assertFalse(eqNull);\n Assert.assertFalse(eqClass);\n Assert.assertTrue(sameInst);\n Assert.assertTrue(sameInst2);\n Assert.assertNotEquals(toString, new MessageDigestFunction(\"SHA1\", o).toString());\n Assert.assertNotEquals(hashCode, new MessageDigestFunction(a, SaltOption.PREPEND).hashCode());\n Assert.assertFalse(notSameInst1);\n Assert.assertFalse(notSameInst2);\n }\n\n}\n", "meta": {"hash_id": "4d6211a486aa405634af503eebdd02ba1ca76522e1f52374eaf06112b1351b02"}}]}], "golden_chunks": [{"doc_uuid": "d08c07ecf2fa3858f8e744e51c3c6db56b2a73be61e2b4b68ef9007697320ec2", "index": 11, "content": " String params = Long.toString((((long) 100 * i) << 32) | (algorithm.bits() & 0xffffffffL));\n String expected = \"$\" + algorithm.code() + \"$\" + params + \"$\" + Base64.getEncoder().encodeToString(salt.getBytes(Utils.DEFAULT_CHARSET)) + \"$\" + notCompressedHash.getResult();\n\n assertEquals(expected, hash.getResult());\n }\n }\n\n @Test\n public void testAccessors()\n {\n // GIVEN\n\n\n // WHEN\n MessageDigestFunction function = MessageDigestFunction.getInstance(\"MD5\", SaltOption.APPEND);\n\n // THEN\n assertEquals(\"MD5\", function.getAlgorithm());\n assertEquals(SaltOption.APPEND, function.getSaltOption());\n assertEquals(\"MessageDigestFunction(a=MD5, o=APPEND)\", function.toString());\n }\n\n @Test\n public void testEquality()\n {\n // GIVEN\n String a = \"MD5\";\n SaltOption o = SaltOption.APPEND;\n MessageDigestFunction function = MessageDigestFunction.getInstance(a, o);\n\n", "meta": {"hash_id": "8bc439bb2011be543babc3850d466b82964763bbf4066f5b72efc9cd8f484dc7"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/MessageDigestFunctionTest.java", "repo_name": "Password4j/password4j", "num_chunks": 14, "source": "codebases/jsonl"}} +{"query": "What does the test method `issue92()` do?", "answer": "The test method `issue92()` verifies the functionality of the password hashing and verification using Argon2. It takes a predefined hash and plain password, creates an Argon2Function instance from the hash, and then verifies the plain password against the hash. It also creates a new hash using the same plain password and salt, and verifies the plain password against the new hash. 
The test asserts that both verifications pass and the new hash matches the expected value.", "golden_doc_uuids": ["36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a"], "golden_chunk_uuids": [["36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", 1]], "golden_documents": [{"uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = \"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9ΫI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"’(っ^▿^)۶\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A٩(˘◡˘ ) ❌❌ ❌❌❌\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n 
Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"ŸŁĀPRČ\")\n .addSalt(\"ŸŁĀPRČAA\")\n .withArgon2()\n .getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/IssuesTest.java", "repo_name": "Password4j/password4j", "num_chunks": 9, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 0, "content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = \"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n", "meta": {"hash_id": "d5475189421669ce35822d4bcea359553534b705776cfd40d29fa8113814d97c"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 1, "content": " boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9ΫI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n", "meta": {"hash_id": "eb427a68c44ab6810661efaff8306e5d2dbbdf3962598c5ef00c2092c5d9c933"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 2, "content": "\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n", "meta": {"hash_id": "f73527ebfb4b6a4b0cb3df2c810c7a6944faa3a6d8e5fc5ace94bc6836edea28"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 3, "content": " (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, 
salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n", "meta": {"hash_id": "2dbdb4f14bd66be04caaa3754b0d90ad7d47bf1f680f6f382aa6198444c08339"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 4, "content": " /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n", "meta": {"hash_id": "c82e6391746775c1f51dc26c5741739be7c84a8fb99801dcafd5b168a6115855"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 5, "content": "\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n", "meta": {"hash_id": "7f42af91092849f6c1b66d48f42dddfcbf638f9bf6a98d87711234ff54e80e20"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 6, "content": " // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"’(っ^▿^)۶\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A٩(˘◡˘ ) ❌❌ ❌❌❌\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n", "meta": {"hash_id": "e0b09563621f1ee95fb92714496bb9922d6ae8ac33a52925bbc09c99d5eed304"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 7, "content": " Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"ŸŁĀPRČ\")\n .addSalt(\"ŸŁĀPRČAA\")\n .withArgon2()\n .getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n", "meta": {"hash_id": "7f3faf62733cfdf1e86a126b33b33caa562f4f767a45ce4e895bc57b2c22c97a"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 8, "content": " private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n", "meta": {"hash_id": "1c9c14b92a9898e99aa8f37013cef3822f384351ce01b6191e5b9c4a45e426ae"}}]}], "golden_chunks": [{"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 1, "content": " boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9ΫI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n 
assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n", "meta": {"hash_id": "eb427a68c44ab6810661efaff8306e5d2dbbdf3962598c5ef00c2092c5d9c933"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/IssuesTest.java", "repo_name": "Password4j/password4j", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "What does the test method `issue99()` do?", "answer": "The test method `issue99()` tests the Argon2 hashing function with specific parameters. It sets the memory, iterations, parallelism, output length, version, salt, and password values. It then creates an Argon2Function instance with the specified parameters and hashes the password using the salt. Finally, it compares the resulting hash bytes with an expected result string.", "golden_doc_uuids": ["36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a"], "golden_chunk_uuids": [["36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", 2]], "golden_documents": [{"uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = \"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9ΫI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = 
function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"’(っ^▿^)۶\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A٩(˘◡˘ ) ❌❌ ❌❌❌\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"ŸŁĀPRČ\")\n .addSalt(\"ŸŁĀPRČAA\")\n .withArgon2()\n .getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/IssuesTest.java", "repo_name": "Password4j/password4j", "num_chunks": 9, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 0, "content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = \"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n", "meta": {"hash_id": "d5475189421669ce35822d4bcea359553534b705776cfd40d29fa8113814d97c"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 1, "content": " boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9ΫI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n", "meta": {"hash_id": "eb427a68c44ab6810661efaff8306e5d2dbbdf3962598c5ef00c2092c5d9c933"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 2, "content": "\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 
0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n", "meta": {"hash_id": "f73527ebfb4b6a4b0cb3df2c810c7a6944faa3a6d8e5fc5ace94bc6836edea28"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 3, "content": " (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n", "meta": {"hash_id": "2dbdb4f14bd66be04caaa3754b0d90ad7d47bf1f680f6f382aa6198444c08339"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 4, "content": " /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n", "meta": {"hash_id": "c82e6391746775c1f51dc26c5741739be7c84a8fb99801dcafd5b168a6115855"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 5, "content": "\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n", "meta": {"hash_id": "7f42af91092849f6c1b66d48f42dddfcbf638f9bf6a98d87711234ff54e80e20"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 6, "content": " // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"’(っ^▿^)۶\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A٩(˘◡˘ ) ❌❌ ❌❌❌\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n", "meta": {"hash_id": "e0b09563621f1ee95fb92714496bb9922d6ae8ac33a52925bbc09c99d5eed304"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 7, "content": " Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"ŸŁĀPRČ\")\n .addSalt(\"ŸŁĀPRČAA\")\n .withArgon2()\n .getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n", "meta": {"hash_id": "7f3faf62733cfdf1e86a126b33b33caa562f4f767a45ce4e895bc57b2c22c97a"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 8, "content": " private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new 
StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n", "meta": {"hash_id": "1c9c14b92a9898e99aa8f37013cef3822f384351ce01b6191e5b9c4a45e426ae"}}]}], "golden_chunks": [{"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 2, "content": "\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n", "meta": {"hash_id": "f73527ebfb4b6a4b0cb3df2c810c7a6944faa3a6d8e5fc5ace94bc6836edea28"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/IssuesTest.java", "repo_name": "Password4j/password4j", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "What does the test method `issue93()` do?", "answer": "The test method `issue93()` tests the password verification functionality using Argon2. It takes a predefined hash and plain password, creates an Argon2Function instance from the hash, and then verifies the plain password against the hash using two different approaches:\n- `Password.check(\"Test123!\", hash).with(function)`\n- `function.check(\"Test123!\", hash)`\nThe test asserts that both verifications pass.", "golden_doc_uuids": ["36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a"], "golden_chunk_uuids": [["36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", 4]], "golden_documents": [{"uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = \"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9ΫI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] 
password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"’(っ^▿^)۶\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A٩(˘◡˘ ) ❌❌ ❌❌❌\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"ŸŁĀPRČ\")\n .addSalt(\"ŸŁĀPRČAA\")\n .withArgon2()\n .getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/IssuesTest.java", "repo_name": "Password4j/password4j", "num_chunks": 9, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 0, "content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = \"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n", "meta": {"hash_id": "d5475189421669ce35822d4bcea359553534b705776cfd40d29fa8113814d97c"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 1, "content": " boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9ΫI2o.W\").with(function);\n boolean 
verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n", "meta": {"hash_id": "eb427a68c44ab6810661efaff8306e5d2dbbdf3962598c5ef00c2092c5d9c933"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 2, "content": "\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n", "meta": {"hash_id": "f73527ebfb4b6a4b0cb3df2c810c7a6944faa3a6d8e5fc5ace94bc6836edea28"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 3, "content": " (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n", "meta": {"hash_id": "2dbdb4f14bd66be04caaa3754b0d90ad7d47bf1f680f6f382aa6198444c08339"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 4, "content": " /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n", "meta": {"hash_id": "c82e6391746775c1f51dc26c5741739be7c84a8fb99801dcafd5b168a6115855"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 5, "content": "\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n", "meta": {"hash_id": "7f42af91092849f6c1b66d48f42dddfcbf638f9bf6a98d87711234ff54e80e20"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 6, "content": " // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"’(っ^▿^)۶\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A٩(˘◡˘ ) ❌❌ ❌❌❌\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n", "meta": {"hash_id": "e0b09563621f1ee95fb92714496bb9922d6ae8ac33a52925bbc09c99d5eed304"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 7, "content": " 
Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"ŸŁĀPRČ\")\n .addSalt(\"ŸŁĀPRČAA\")\n .withArgon2()\n .getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n", "meta": {"hash_id": "7f3faf62733cfdf1e86a126b33b33caa562f4f767a45ce4e895bc57b2c22c97a"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 8, "content": " private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n", "meta": {"hash_id": "1c9c14b92a9898e99aa8f37013cef3822f384351ce01b6191e5b9c4a45e426ae"}}]}], "golden_chunks": [{"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 4, "content": " /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n", "meta": {"hash_id": "c82e6391746775c1f51dc26c5741739be7c84a8fb99801dcafd5b168a6115855"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/IssuesTest.java", "repo_name": "Password4j/password4j", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "What does the test method `issue120()` do?", "answer": "The test method `issue120()` tests the behavior of the library when a fake provider is added to the security providers. It creates an empty provider with a specific name, adds it to the security providers using `Security.addProvider(emptyProvider)`, and then calls `Password.hash(\"hash\")`. The test is expected to pass without any exceptions, as indicated by the annotation `@Test(expected = Test.None.class)`. 
After the test, it removes the fake provider using `Security.removeProvider(name)`.", "golden_doc_uuids": ["36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a"], "golden_chunk_uuids": [["36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", 5]], "golden_documents": [{"uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = \"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9ΫI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"’(っ^▿^)۶\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A٩(˘◡˘ ) ❌❌ ❌❌❌\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n 
Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"ŸŁĀPRČ\")\n .addSalt(\"ŸŁĀPRČAA\")\n .withArgon2()\n .getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/IssuesTest.java", "repo_name": "Password4j/password4j", "num_chunks": 9, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 0, "content": "package com.password4j;\n\nimport com.password4j.types.Argon2;\nimport org.junit.Assert;\nimport org.junit.Test;\n\nimport java.security.Provider;\nimport java.security.Security;\nimport java.util.Set;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\npublic class IssuesTest\n{\n\n /**\n * @see issue #92\n */\n @Test\n public void issue92()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n String plain = \"Test123!\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n", "meta": {"hash_id": "d5475189421669ce35822d4bcea359553534b705776cfd40d29fa8113814d97c"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 1, "content": " boolean verified = Password.check(plain, hash).with(function);\n Hash newHash = Password.hash(plain).addSalt(\"Y9ΫI2o.W\").with(function);\n boolean verified2 = Password.check(plain, newHash);\n\n assertTrue(verified);\n assertTrue(verified2);\n assertEquals(\"$argon2id$v=19$m=16384,t=2,p=1$WTnOq0kyby5X$SewIdM+Ywctw0lfNQ0xKYoUIlyRs3qF+gVmEVtpdmyg\", newHash.getResult());\n }\n\n", "meta": {"hash_id": "eb427a68c44ab6810661efaff8306e5d2dbbdf3962598c5ef00c2092c5d9c933"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 2, "content": "\n /**\n * @see issue #99\n */\n @Test\n public void issue99()\n {\n int memory = 65536;\n int iterations = 2;\n int parallelism = 3;\n int outputLength = 32;\n int version = 0x13;\n byte[] salt =\n {\n (byte) 0x6b, (byte) 0x25, (byte) 0xc9, (byte) 0xd7, (byte) 0x0e, (byte) 0x5c, (byte) 0x19, (byte) 0xac,\n (byte) 0x51, (byte) 0x74, (byte) 0xd7, (byte) 0x74, (byte) 0x53, (byte) 0xad, (byte) 0x23, (byte) 0x70,\n", "meta": {"hash_id": "f73527ebfb4b6a4b0cb3df2c810c7a6944faa3a6d8e5fc5ace94bc6836edea28"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 3, "content": " (byte) 0x15, (byte) 0x27, (byte) 0x56, (byte) 0x2e, (byte) 0x02, (byte) 0xb8, (byte) 0xec, (byte) 0x5c,\n (byte) 0xac, (byte) 0x89, (byte) 0x2d, (byte) 0xc3, (byte) 0xe4, (byte) 0xb5, (byte) 0x1c, (byte) 0x12\n };\n byte[] password=\"Test\".getBytes();\n Argon2 type = Argon2.ID;\n Argon2Function instance=Argon2Function.getInstance(memory, iterations, parallelism, outputLength, type, version);\n\n Hash hash = instance.hash(password, 
salt);\n\n\n String expResult = \"cbcfdee482c233e525ca405c7014e89cd33142758a2f1d23c420690f950c988c\";\n assertEquals(expResult, printBytesToString(hash.getBytes()));\n }\n\n", "meta": {"hash_id": "2dbdb4f14bd66be04caaa3754b0d90ad7d47bf1f680f6f382aa6198444c08339"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 4, "content": " /**\n * @see issue #93\n */\n @Test\n public void issue93()\n {\n String hash = \"$argon2id$v=19$m=16384,t=2,p=1$nlm7oNI5zquzSYkyby6oVw$JOkJAYrDB0i2gmiJrXC6o2r+u1rszCm/RO9gIQtnxlY\";\n Argon2Function function = Argon2Function.getInstanceFromHash(hash);\n\n boolean test1 = Password.check(\"Test123!\", hash).with(function);\n assertTrue(test1);\n\n boolean test2 = function.check(\"Test123!\", hash);\n assertTrue(test2);\n }\n\n", "meta": {"hash_id": "c82e6391746775c1f51dc26c5741739be7c84a8fb99801dcafd5b168a6115855"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 5, "content": "\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n", "meta": {"hash_id": "7f42af91092849f6c1b66d48f42dddfcbf638f9bf6a98d87711234ff54e80e20"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 6, "content": " // WHEN\n Password.hash(\"hash\");\n\n // THEN\n Security.removeProvider(name);\n }\n\n\n /**\n * @see issue #126\n */\n @Test\n public void issue126()\n {\n byte[] hashBytes = Password.hash(\"’(っ^▿^)۶\\uD83C\\uDF78\\uD83C\\uDF1F\\uD83C\\uDF7A٩(˘◡˘ ) ❌❌ ❌❌❌\")\n .addSalt(\"\\uD83E\\uDDC2\")\n .withScrypt()\n .getBytes();\n\n", "meta": {"hash_id": "e0b09563621f1ee95fb92714496bb9922d6ae8ac33a52925bbc09c99d5eed304"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 7, "content": " Assert.assertEquals(\"827b022b411e712e5ae4855d8c71cb047d882b2457120d1019974d17dcf6f1bf59644d9a93e470ab14ee5f7a88ae9b0140d2db121de58f6d830fc9c16c82f212\", printBytesToString(hashBytes));\n\n\n hashBytes = Password.hash(\"ŸŁĀPRČ\")\n .addSalt(\"ŸŁĀPRČAA\")\n .withArgon2()\n .getBytes();\n\n Assert.assertEquals(\"59dedcf45d7a8604926ca66f6abe3990ce8b6ba108f535836fa18e95b7d94e9f56301e422c1d487dd06dc26061261402a5f7fe912bd545b6aeec866fec74df81\", printBytesToString(hashBytes));\n\n }\n\n", "meta": {"hash_id": "7f3faf62733cfdf1e86a126b33b33caa562f4f767a45ce4e895bc57b2c22c97a"}}, {"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 8, "content": " private static String printBytesToString(byte[] bytes)\n {\n StringBuilder byteString= new StringBuilder();\n if (bytes!=null)\n {\n for (byte aByte : bytes)\n {\n byteString.append(String.format(\"%02x\", aByte));\n }\n }\n else\n {\n byteString = new StringBuilder(\"-\");\n }\n return byteString.toString();\n }\n\n\n}\n", "meta": {"hash_id": "1c9c14b92a9898e99aa8f37013cef3822f384351ce01b6191e5b9c4a45e426ae"}}]}], "golden_chunks": [{"doc_uuid": "36249e30a48fc8f61995b02dbfc658dd868eaab96d44ce7edc2930b75b51d20a", "index": 5, "content": "\n /**\n * @see issue #120\n */\n @Test(expected = Test.None.class)\n public void issue120()\n {\n // GIVEN\n String name = \"issue120FakeProvider\";\n Provider emptyProvider = new Provider(name, 1, \"info\")\n {\n @Override\n public synchronized Set getServices()\n {\n 
return null;\n }\n };\n Security.addProvider(emptyProvider);\n\n", "meta": {"hash_id": "7f42af91092849f6c1b66d48f42dddfcbf638f9bf6a98d87711234ff54e80e20"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/com/password4j/IssuesTest.java", "repo_name": "Password4j/password4j", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "Which package does the Argon2 enum belong to?", "answer": "The Argon2 enum is defined in the \"com.password4j.types\" package, as indicated by the package declaration at the top of the file:\n\npackage com.password4j.types;\n", "golden_doc_uuids": ["b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e"], "golden_chunk_uuids": [["b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e", 1], ["b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e", 0]], "golden_documents": [{"uuid": "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e", "content": "/*\n * (C) Copyright 2021 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j.types;\n\n/**\n * Enum containing the different variations of Argon2.\n *\n * @author David Bertoldi\n * @see Argon2\n * @since 1.5.0\n */\npublic enum Argon2\n{\n /**\n * It maximizes resistance to GPU cracking attacks.\n * It accesses the memory array in a password dependent order, which reduces the possibility of time–memory trade-off (TMTO) attacks,\n * but introduces possible side-channel attacks\n */\n D,\n\n /**\n * It is optimized to resist side-channel attacks. It accesses the memory array in a password independent order.\n */\n I,\n\n /**\n * It is a hybrid version. 
It follows the Argon2i approach for the first half pass over memory and the Argon2d approach for subsequent passes.\n * It is recommended to use Argon2id except when there are reasons to prefer one of the other two modes.\n */\n ID;\n\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/types/Argon2.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e", "index": 0, "content": "/*\n * (C) Copyright 2021 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j.types;\n\n", "meta": {"hash_id": "4945813fea2709f027351579c4147edbcf5df8f84bcf74fb58ff37070ac7430a"}}, {"doc_uuid": "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e", "index": 1, "content": "/**\n * Enum containing the different variations of Argon2.\n *\n * @author David Bertoldi\n * @see Argon2\n * @since 1.5.0\n */\npublic enum Argon2\n{\n /**\n * It maximizes resistance to GPU cracking attacks.\n * It accesses the memory array in a password dependent order, which reduces the possibility of time–memory trade-off (TMTO) attacks,\n * but introduces possible side-channel attacks\n */\n D,\n\n /**\n * It is optimized to resist side-channel attacks. It accesses the memory array in a password independent order.\n */\n I,\n\n /**\n * It is a hybrid version. It follows the Argon2i approach for the first half pass over memory and the Argon2d approach for subsequent passes.\n * It is recommended to use Argon2id except when there are reasons to prefer one of the other two modes.\n */\n ID;\n\n}\n", "meta": {"hash_id": "1394bab9d1c03d8539ae23f694adda1c755a5ca8a81b2c0188af7544e06d5ccd"}}]}], "golden_chunks": [{"doc_uuid": "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e", "index": 1, "content": "/**\n * Enum containing the different variations of Argon2.\n *\n * @author David Bertoldi\n * @see Argon2\n * @since 1.5.0\n */\npublic enum Argon2\n{\n /**\n * It maximizes resistance to GPU cracking attacks.\n * It accesses the memory array in a password dependent order, which reduces the possibility of time–memory trade-off (TMTO) attacks,\n * but introduces possible side-channel attacks\n */\n D,\n\n /**\n * It is optimized to resist side-channel attacks. It accesses the memory array in a password independent order.\n */\n I,\n\n /**\n * It is a hybrid version. 
It follows the Argon2i approach for the first half pass over memory and the Argon2d approach for subsequent passes.\n * It is recommended to use Argon2id except when there are reasons to prefer one of the other two modes.\n */\n ID;\n\n}\n", "meta": {"hash_id": "1394bab9d1c03d8539ae23f694adda1c755a5ca8a81b2c0188af7544e06d5ccd"}}, {"doc_uuid": "b5c2f52516a708781023ca83bc4b32e3aed11dffee5e877eccb4e8ad57e8697e", "index": 0, "content": "/*\n * (C) Copyright 2021 Password4j (http://password4j.com/).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage com.password4j.types;\n\n", "meta": {"hash_id": "4945813fea2709f027351579c4147edbcf5df8f84bcf74fb58ff37070ac7430a"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/java/com/password4j/types/Argon2.java", "repo_name": "Password4j/password4j", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `Tag` class?", "answer": "The `Tag` class represents an authentication tag that can be verified without channels using the provided APIs. According to the comment, it is \"Very small and no streaming.\" More details about authentication tags can be found in the WASI Crypto specification: \"https://github.com/WebAssembly/wasi-crypto/blob/main/docs/wasi-crypto.md#authentication-tags\"", "golden_doc_uuids": ["7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298"], "golden_chunk_uuids": [["7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", 1]], "golden_documents": [{"uuid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/plugins/wasi_crypto/symmetric/tag.h - Symmetric Tag class ===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the Symmetric Tag definition.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"utils/error.h\"\n#include \"utils/secret_vec.h\"\n\n#include \"common/span.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace Symmetric {\n\n/// Authentication tag, that can be verified without channels using the provided\n/// APIs. 
Very small and no streaming.\n///\n/// More detail:\n/// https://github.com/WebAssembly/wasi-crypto/blob/main/docs/wasi-crypto.md#authentication-tags\nclass Tag {\npublic:\n Tag(Tag &&Data) noexcept = default;\n Tag &operator=(Tag &&Data) noexcept = default;\n Tag(const Tag &Data) noexcept = delete;\n Tag &operator=(const Tag &Data) noexcept = delete;\n\n Tag(SecretVec &&Data) noexcept : Data(std::move(Data)) {}\n\n size_t len() const noexcept { return Data.size(); }\n\n /// The function MUST return `__WASI_CRYPTO_ERRNO_INVALID_TAG` if the\n /// tags don't match.\n WasiCryptoExpect verify(Span RawTag) const noexcept;\n\n WasiCryptoExpect pull(Span Raw) const noexcept;\n\nprivate:\n SecretVec Data;\n};\n\n} // namespace Symmetric\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/plugins/wasi_crypto/symmetric/tag.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/plugins/wasi_crypto/symmetric/tag.h - Symmetric Tag class ===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the Symmetric Tag definition.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n", "meta": {"hash_id": "edbe4041f6380bdc407c8e8a336626b8660419642ea2586431655aca1b78a7ee"}}, {"doc_uuid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", "index": 1, "content": "#include \"utils/error.h\"\n#include \"utils/secret_vec.h\"\n\n#include \"common/span.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace Symmetric {\n\n/// Authentication tag, that can be verified without channels using the provided\n/// APIs. 
Very small and no streaming.\n///\n/// More detail:\n/// https://github.com/WebAssembly/wasi-crypto/blob/main/docs/wasi-crypto.md#authentication-tags\nclass Tag {\npublic:\n Tag(Tag &&Data) noexcept = default;\n Tag &operator=(Tag &&Data) noexcept = default;\n Tag(const Tag &Data) noexcept = delete;\n Tag &operator=(const Tag &Data) noexcept = delete;\n\n", "meta": {"hash_id": "6540e892882829cdd4f82974eb5ffd54112220ceae66053e58ddb7196709203d"}}, {"doc_uuid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", "index": 2, "content": " Tag(SecretVec &&Data) noexcept : Data(std::move(Data)) {}\n\n size_t len() const noexcept { return Data.size(); }\n\n /// The function MUST return `__WASI_CRYPTO_ERRNO_INVALID_TAG` if the\n /// tags don't match.\n WasiCryptoExpect verify(Span RawTag) const noexcept;\n\n WasiCryptoExpect pull(Span Raw) const noexcept;\n\nprivate:\n SecretVec Data;\n};\n\n} // namespace Symmetric\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "d017e99e3a42cf5ef6cec4ec49fa988df18dbc1b54e6ead3e7e7db020b5889ee"}}]}], "golden_chunks": [{"doc_uuid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", "index": 1, "content": "#include \"utils/error.h\"\n#include \"utils/secret_vec.h\"\n\n#include \"common/span.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace Symmetric {\n\n/// Authentication tag, that can be verified without channels using the provided\n/// APIs. Very small and no streaming.\n///\n/// More detail:\n/// https://github.com/WebAssembly/wasi-crypto/blob/main/docs/wasi-crypto.md#authentication-tags\nclass Tag {\npublic:\n Tag(Tag &&Data) noexcept = default;\n Tag &operator=(Tag &&Data) noexcept = default;\n Tag(const Tag &Data) noexcept = delete;\n Tag &operator=(const Tag &Data) noexcept = delete;\n\n", "meta": {"hash_id": "6540e892882829cdd4f82974eb5ffd54112220ceae66053e58ddb7196709203d"}}], "meta": {"doctype": "codebase", "relative_path": "/plugins/wasi_crypto/symmetric/tag.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `pull` function in the `Tag` class?", "answer": "The `pull` function is used to retrieve the tag data. It takes a `Span` representing the raw buffer where the tag data will be copied and returns a `WasiCryptoExpect` indicating the number of bytes copied.", "golden_doc_uuids": ["7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298"], "golden_chunk_uuids": [["7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", 2]], "golden_documents": [{"uuid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/plugins/wasi_crypto/symmetric/tag.h - Symmetric Tag class ===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the Symmetric Tag definition.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"utils/error.h\"\n#include \"utils/secret_vec.h\"\n\n#include \"common/span.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace Symmetric {\n\n/// Authentication tag, that can be verified without channels using the provided\n/// APIs. 
Very small and no streaming.\n///\n/// More detail:\n/// https://github.com/WebAssembly/wasi-crypto/blob/main/docs/wasi-crypto.md#authentication-tags\nclass Tag {\npublic:\n Tag(Tag &&Data) noexcept = default;\n Tag &operator=(Tag &&Data) noexcept = default;\n Tag(const Tag &Data) noexcept = delete;\n Tag &operator=(const Tag &Data) noexcept = delete;\n\n Tag(SecretVec &&Data) noexcept : Data(std::move(Data)) {}\n\n size_t len() const noexcept { return Data.size(); }\n\n /// The function MUST return `__WASI_CRYPTO_ERRNO_INVALID_TAG` if the\n /// tags don't match.\n WasiCryptoExpect verify(Span RawTag) const noexcept;\n\n WasiCryptoExpect pull(Span Raw) const noexcept;\n\nprivate:\n SecretVec Data;\n};\n\n} // namespace Symmetric\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/plugins/wasi_crypto/symmetric/tag.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/plugins/wasi_crypto/symmetric/tag.h - Symmetric Tag class ===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the Symmetric Tag definition.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n", "meta": {"hash_id": "edbe4041f6380bdc407c8e8a336626b8660419642ea2586431655aca1b78a7ee"}}, {"doc_uuid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", "index": 1, "content": "#include \"utils/error.h\"\n#include \"utils/secret_vec.h\"\n\n#include \"common/span.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace Symmetric {\n\n/// Authentication tag, that can be verified without channels using the provided\n/// APIs. 
Very small and no streaming.\n///\n/// More detail:\n/// https://github.com/WebAssembly/wasi-crypto/blob/main/docs/wasi-crypto.md#authentication-tags\nclass Tag {\npublic:\n Tag(Tag &&Data) noexcept = default;\n Tag &operator=(Tag &&Data) noexcept = default;\n Tag(const Tag &Data) noexcept = delete;\n Tag &operator=(const Tag &Data) noexcept = delete;\n\n", "meta": {"hash_id": "6540e892882829cdd4f82974eb5ffd54112220ceae66053e58ddb7196709203d"}}, {"doc_uuid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", "index": 2, "content": " Tag(SecretVec &&Data) noexcept : Data(std::move(Data)) {}\n\n size_t len() const noexcept { return Data.size(); }\n\n /// The function MUST return `__WASI_CRYPTO_ERRNO_INVALID_TAG` if the\n /// tags don't match.\n WasiCryptoExpect verify(Span RawTag) const noexcept;\n\n WasiCryptoExpect pull(Span Raw) const noexcept;\n\nprivate:\n SecretVec Data;\n};\n\n} // namespace Symmetric\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "d017e99e3a42cf5ef6cec4ec49fa988df18dbc1b54e6ead3e7e7db020b5889ee"}}]}], "golden_chunks": [{"doc_uuid": "7aa93d0e14ee0838bd30f00d35918a93645c3b8bc8bb390b8b87bb7b77e65298", "index": 2, "content": " Tag(SecretVec &&Data) noexcept : Data(std::move(Data)) {}\n\n size_t len() const noexcept { return Data.size(); }\n\n /// The function MUST return `__WASI_CRYPTO_ERRNO_INVALID_TAG` if the\n /// tags don't match.\n WasiCryptoExpect verify(Span RawTag) const noexcept;\n\n WasiCryptoExpect pull(Span Raw) const noexcept;\n\nprivate:\n SecretVec Data;\n};\n\n} // namespace Symmetric\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "d017e99e3a42cf5ef6cec4ec49fa988df18dbc1b54e6ead3e7e7db020b5889ee"}}], "meta": {"doctype": "codebase", "relative_path": "/plugins/wasi_crypto/symmetric/tag.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How does the Serializer class serialize branch control instructions like br and br_if?", "answer": "The Serializer class serializes branch control instructions by encoding the target label index as a 32-bit integer. For example, for a br instruction:\n\n Br.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {Br, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8 \n 0x00U, // Local vec(0)\n 0x0CU, // OpCode Br.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n\nThe label index 0xFFFFFFFF is encoded as the 5 bytes 0xFF, 0xFF, 0xFF, 0xFF, 0x0F after the br opcode 0x0C. 
br_if is serialized the same way, just with opcode 0x0D.", "golden_doc_uuids": ["0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238"], "golden_chunk_uuids": [["0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", 12]], "golden_documents": [{"uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"loader/serialize.h\"\n\n#include \n#include \n#include \n\nnamespace {\n\nWasmEdge::Configure Conf;\nWasmEdge::Loader::Serializer Ser(Conf);\n\nWasmEdge::AST::CodeSection\ncreateCodeSec(std::vector Instructions) {\n WasmEdge::AST::CodeSection CodeSec;\n WasmEdge::AST::CodeSegment CodeSeg;\n WasmEdge::AST::Expression Expr;\n Expr.getInstrs() = Instructions;\n CodeSeg.getExpr() = Expr;\n CodeSec.getContent().push_back(CodeSeg);\n return CodeSec;\n}\n\nTEST(SerializeInstructionTest, SerializeBlockControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 1. Test block control instructions.\n //\n // 1. Serialize block with only end operation.\n // 2. Serialize loop with only end operation.\n // 3. Serialize block with instructions.\n // 4. Serialize loop with instructions.\n\n WasmEdge::AST::Instruction Block(WasmEdge::OpCode::Block);\n WasmEdge::AST::Instruction Loop(WasmEdge::OpCode::Loop);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n Block.setEmptyBlockType();\n Instructions = {Block, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n Loop.setEmptyBlockType();\n Instructions = {Loop, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n Loop.setEmptyBlockType();\n Instructions = {Block, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n Loop.setEmptyBlockType();\n Instructions = {Loop, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n 
};\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeIfElseControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 2. Test serialize if-else control instruction.\n //\n // 1. Serialize if statement with only end operation.\n // 2. Serialize if and else statements with only end operation.\n // 3. Serialize if statement with instructions.\n // 4. Serialize if and else statements with instructions.\n\n WasmEdge::AST::Instruction If(WasmEdge::OpCode::If);\n WasmEdge::AST::Instruction Else(WasmEdge::OpCode::Else);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n If.setEmptyBlockType();\n Instructions = {If, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n If.setEmptyBlockType();\n Instructions = {If, Else, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x05U, // OpCode Else\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, Else,\n I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0EU, // Content size = 14\n 0x01U, // Vector length = 1\n 0x0CU, // Code segment size = 12\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x05U, // OpCode Else\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in else statement.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeBrControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 3. Test branch control instructions.\n //\n // 1. 
Serialize valid label index.\n\n WasmEdge::AST::Instruction Br(WasmEdge::OpCode::Br);\n WasmEdge::AST::Instruction BrIf(WasmEdge::OpCode::Br_if);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n Br.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {Br, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x0CU, // OpCode Br.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n BrIf.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrIf, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected[5] = 0x0DU; // OpCode Br_if.\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeBrTableControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 4. Test branch table control instruction.\n //\n // 1. Serialize instruction with empty label vector.\n // 2. Serialize instruction with label vector.\n\n WasmEdge::AST::Instruction BrTable(WasmEdge::OpCode::Br_table);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n BrTable.setLabelListSize(1);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x0EU, // OpCode Br_table.\n 0x00U, // Vector length = 0\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n BrTable.setLabelListSize(4);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFF1U;\n BrTable.getLabelList()[1].TargetIndex = 0xFFFFFFF2U;\n BrTable.getLabelList()[2].TargetIndex = 0xFFFFFFF3U;\n BrTable.getLabelList()[3].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x1AU, // Content size = 26\n 0x01U, // Vector length = 1\n 0x18U, // Code segment size = 24\n 0x00U, // Local vec(0)\n 0x0EU, // OpCode Br_table.\n 0x03U, // Vector length = 3\n 0xF1U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[0]\n 0xF2U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[1]\n 0xF3U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[2]\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeCallControlInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 5. Test call control instructions.\n //\n // 1. Serialize call instruction with valid type index.\n // 2. Serialize call_indirect instruction with valid type and table index.\n // 3. 
Serialize call_indirect instruction with invalid table index without\n // Ref-Types proposal.\n\n WasmEdge::AST::Instruction Call(WasmEdge::OpCode::Call);\n WasmEdge::AST::Instruction CallIndirect(WasmEdge::OpCode::Call_indirect);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n Call.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {Call, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x10U, // OpCode Call.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Function type index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n CallIndirect.getTargetIndex() = 0xFFFFFFFFU;\n CallIndirect.getSourceIndex() = 0x05U;\n Instructions = {CallIndirect, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x11U, // OpCode Call_indirect.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Type index.\n 0x05U, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeReferenceInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 6. Test reference instructions.\n //\n // 1. Serialize function reference type.\n // 2. Serialize invalid reference type without Ref-Types proposal.\n\n WasmEdge::AST::Instruction RefNull(WasmEdge::OpCode::Ref__null);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n RefNull.setValType(WasmEdge::TypeCode::FuncRef);\n Instructions = {RefNull, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0xD0U, // OpCode Ref__null.\n 0x70U, // FuncRef\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n RefNull.setValType(WasmEdge::TypeCode::ExternRef);\n Instructions = {RefNull, End};\n EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeParametricInstruction) {\n WasmEdge::Configure ConfNoSIMD;\n ConfNoSIMD.removeProposal(WasmEdge::Proposal::SIMD);\n WasmEdge::Loader::Serializer SerNoSIMD(ConfNoSIMD);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 7. Test parametric instructions.\n //\n // 1. Serialize valid select_t instruction with value type list.\n // 2. 
Serialize invalid value type list without SIMD proposal.\n\n WasmEdge::AST::Instruction SelectT(WasmEdge::OpCode::Select_t);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n SelectT.setValTypeListSize(2);\n SelectT.getValTypeList()[0] = WasmEdge::TypeCode::I32;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::I64;\n Instructions = {SelectT, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x1CU, // OpCode Select_t.\n 0x02U, // Vector length = 2\n 0x7FU, 0x7EU, // Value types\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n SelectT.getValTypeList()[0] = WasmEdge::TypeCode::V128;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::V128;\n Instructions = {SelectT, End};\n EXPECT_FALSE(SerNoSIMD.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeVariableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 8. Test variable instructions.\n //\n // 1. Serialize valid local or global index.\n\n WasmEdge::AST::Instruction LocalGet(WasmEdge::OpCode::Local__get);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n LocalGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {LocalGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x20U, // OpCode Local__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Local index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeTableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 9. Test table instructions.\n //\n // 1. Serialize table_get instruction.\n // 2. Serialize table_init instruction.\n\n WasmEdge::AST::Instruction TableGet(WasmEdge::OpCode::Table__get);\n WasmEdge::AST::Instruction TableInit(WasmEdge::OpCode::Table__init);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n TableGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x25U, // OpCode Table__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n TableInit.getSourceIndex() = 0x05U;\n TableInit.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableInit, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0CU, // Content size = 12\n 0x01U, // Vector length = 1\n 0x0AU, // Code segment size = 10\n 0x00U, // Local vec(0)\n 0xFCU, 0x0CU, // OpCode Table__init.\n 0x05U, // Element idx.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeMemoryInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 10. Test memory instructions.\n //\n // 1. Serialize memory_grow instruction.\n // 2. 
Serialize i32_load instruction.\n\n WasmEdge::AST::Instruction MemoryGrow(WasmEdge::OpCode::Memory__grow);\n WasmEdge::AST::Instruction I32Load(WasmEdge::OpCode::I32__load);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n Instructions = {MemoryGrow, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0x40U, // OpCode Memory__grow.\n 0x00U, // Checking byte\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeConstInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 11. Test const numeric instructions.\n //\n // 1. Serialize I32 const numeric instruction.\n // 2. Serialize I64 const numeric instruction.\n // 3. Serialize F32 const numeric instruction.\n // 4. 
Serialize F64 const numeric instruction.\n\n WasmEdge::AST::Instruction I32Const(WasmEdge::OpCode::I32__const);\n WasmEdge::AST::Instruction I64Const(WasmEdge::OpCode::I64__const);\n WasmEdge::AST::Instruction F32Const(WasmEdge::OpCode::F32__const);\n WasmEdge::AST::Instruction F64Const(WasmEdge::OpCode::F64__const);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n I32Const.setNum(-123456);\n Instructions = {I32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x41U, // OpCode I32__const.\n 0xC0U, 0xBBU, 0x78U, // I32 -123456.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n I64Const.setNum(static_cast(-112233445566L));\n Instructions = {I64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x42U, // OpCode I64__const.\n 0xC2U, 0x8EU, 0xF6U, 0xF2U, 0xDDU, 0x7CU, // I64 -112233445566\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n F32Const.setNum(static_cast(-0x1.921fb4p+1)); // -3.1415926F\n Instructions = {F32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x09U, // Content size = 9\n 0x01U, // Vector length = 1\n 0x07U, // Code segment size = 7\n 0x00U, // Local vec(0)\n 0x43U, // OpCode F32__const.\n 0xDAU, 0x0FU, 0x49U, 0xC0U, // F32 -3.1415926\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n F64Const.setNum(-3.1415926535897932);\n Instructions = {F64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0DU, // Content size = 13\n 0x01U, // Vector length = 1\n 0x0BU, // Code segment size = 11\n 0x00U, // Local vec(0)\n 0x44U, // OpCode F64__const.\n 0x18U, 0x2DU, 0x44U, 0x54U,\n 0xFBU, 0x21U, 0x09U, 0xC0U, // F64 -3.1415926535897932\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n} // namespace\n", "meta": {"doctype": "codebase", "relative_path": "/test/loader/serializeInstructionTest.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 37, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"loader/serialize.h\"\n\n#include \n#include \n#include \n\nnamespace {\n\nWasmEdge::Configure Conf;\nWasmEdge::Loader::Serializer Ser(Conf);\n\nWasmEdge::AST::CodeSection\ncreateCodeSec(std::vector Instructions) {\n WasmEdge::AST::CodeSection CodeSec;\n WasmEdge::AST::CodeSegment CodeSeg;\n WasmEdge::AST::Expression Expr;\n Expr.getInstrs() = Instructions;\n CodeSeg.getExpr() = Expr;\n CodeSec.getContent().push_back(CodeSeg);\n return CodeSec;\n}\n\n", "meta": {"hash_id": "17992b4c275604176476a1483de32cc0a325a812d0dd5a92b90a23f164105db9"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 1, "content": "TEST(SerializeInstructionTest, SerializeBlockControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 1. 
Test block control instructions.\n //\n // 1. Serialize block with only end operation.\n // 2. Serialize loop with only end operation.\n // 3. Serialize block with instructions.\n // 4. Serialize loop with instructions.\n\n WasmEdge::AST::Instruction Block(WasmEdge::OpCode::Block);\n WasmEdge::AST::Instruction Loop(WasmEdge::OpCode::Loop);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n", "meta": {"hash_id": "b56b45bb1b10d983c253903591e89c2465b1f69e4b34c7d20c3d2373389b9698"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 2, "content": " Block.setEmptyBlockType();\n Instructions = {Block, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "e6856e79c658fa29a4cf38959ccd574e3817dcca1f1694433f384434d13fcf97"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 3, "content": " Loop.setEmptyBlockType();\n Instructions = {Loop, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "c1111987524e9fbe1dd49fe231967764de81b652f5316b1c267cda9c4cc354b8"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 4, "content": " Loop.setEmptyBlockType();\n Instructions = {Block, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x02U, // OpCode Block.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "09dc54e935c84aa5dec4c0f80879d20e094ab7400e64cf5abe1c1c036b12bc31"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 5, "content": " Loop.setEmptyBlockType();\n Instructions = {Loop, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x03U, // OpCode Loop.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "meta": {"hash_id": "78dde98ee0cb2db98963deaa52d3150427354c6209999b81ffcba7c680532b08"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 6, "content": "TEST(SerializeInstructionTest, 
SerializeIfElseControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 2. Test serialize if-else control instruction.\n //\n // 1. Serialize if statement with only end operation.\n // 2. Serialize if and else statements with only end operation.\n // 3. Serialize if statement with instructions.\n // 4. Serialize if and else statements with instructions.\n\n", "meta": {"hash_id": "27a08ad25b135418f568a2e019648a55211b2652eb5e24cbd3e98e122f5a4dfb"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 7, "content": " WasmEdge::AST::Instruction If(WasmEdge::OpCode::If);\n WasmEdge::AST::Instruction Else(WasmEdge::OpCode::Else);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n WasmEdge::AST::Instruction I32Eqz(WasmEdge::OpCode::I32__eqz);\n WasmEdge::AST::Instruction I32Eq(WasmEdge::OpCode::I32__eq);\n WasmEdge::AST::Instruction I32Ne(WasmEdge::OpCode::I32__ne);\n\n If.setEmptyBlockType();\n Instructions = {If, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x07U, // Content size = 7\n 0x01U, // Vector length = 1\n 0x05U, // Code segment size = 5\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "e5c879ea22961259a7cc2e03f2f735c853ecffcce4d7042a79700a4cb922eace"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 8, "content": " If.setEmptyBlockType();\n Instructions = {If, Else, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x05U, // OpCode Else\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "c825bd0bebeff06f217c028b3eb46afbb0a10f13f885f2160f7cd6dcf75225b1"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 9, "content": " If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x0BU, // OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "e302d29ac7aae50bdd8f7ebb61e5d65efa5bc5d8e8bb3a617ac74e4e34f520a7"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 10, "content": " If.setEmptyBlockType();\n Instructions = {If, I32Eqz, I32Eq, I32Ne, Else,\n I32Eqz, I32Eq, I32Ne, End, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0EU, // Content size = 14\n 0x01U, // Vector length = 1\n 0x0CU, // Code segment size = 12\n 0x00U, // Local vec(0)\n 0x04U, // OpCode If.\n 0x40U, // Block type.\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in if statement.\n 0x05U, // OpCode Else\n 0x45U, 0x46U, 0x47U, // Valid OpCodes in else statement.\n 0x0BU, 
// OpCode End.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "meta": {"hash_id": "86ce8516bd967178e49c27417f525623c8c012b726f7ed4c031904d223f5a8ba"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 11, "content": "TEST(SerializeInstructionTest, SerializeBrControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 3. Test branch control instructions.\n //\n // 1. Serialize valid label index.\n\n WasmEdge::AST::Instruction Br(WasmEdge::OpCode::Br);\n WasmEdge::AST::Instruction BrIf(WasmEdge::OpCode::Br_if);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "meta": {"hash_id": "ce8c9ae832ffade2b96bfef0d430d7d92a70f1be3349228bb52713974f0004e4"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 12, "content": " Br.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {Br, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x0CU, // OpCode Br.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "52d5508308dce755f68b18ec3d7dc960d36c112d6d5b48d25b809d79876fba17"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 13, "content": " BrIf.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrIf, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected[5] = 0x0DU; // OpCode Br_if.\n EXPECT_EQ(Output, Expected);\n}\n\nTEST(SerializeInstructionTest, SerializeBrTableControlInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 4. Test branch table control instruction.\n //\n // 1. Serialize instruction with empty label vector.\n // 2. 
Serialize instruction with label vector.\n\n WasmEdge::AST::Instruction BrTable(WasmEdge::OpCode::Br_table);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "meta": {"hash_id": "13b1be0029a84aa9bc4b0491cd002067a2480412ce50e9d252fbaa08763a69e3"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 14, "content": " BrTable.setLabelListSize(1);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x0EU, // OpCode Br_table.\n 0x00U, // Vector length = 0\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "3c73dabb43d05a181f182a71449ae645fe6354082dca2ca3d22f5e9f8dce7e84"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 15, "content": " BrTable.setLabelListSize(4);\n BrTable.getLabelList()[0].TargetIndex = 0xFFFFFFF1U;\n BrTable.getLabelList()[1].TargetIndex = 0xFFFFFFF2U;\n BrTable.getLabelList()[2].TargetIndex = 0xFFFFFFF3U;\n BrTable.getLabelList()[3].TargetIndex = 0xFFFFFFFFU;\n Instructions = {BrTable, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n", "meta": {"hash_id": "fe5b26544a9d3fa6650e66adb317ecafe989f4e1fd45140b7d252ee68a836ffd"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 16, "content": " 0x1AU, // Content size = 26\n 0x01U, // Vector length = 1\n 0x18U, // Code segment size = 24\n 0x00U, // Local vec(0)\n 0x0EU, // OpCode Br_table.\n 0x03U, // Vector length = 3\n 0xF1U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[0]\n 0xF2U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[1]\n 0xF3U, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // vec[2]\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "meta": {"hash_id": "648d906a9ec5b3ca86e80274bb40696d012e659a0dc3d1b66fd870e4c2fee9e8"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 17, "content": "TEST(SerializeInstructionTest, SerializeCallControlInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 5. Test call control instructions.\n //\n // 1. Serialize call instruction with valid type index.\n // 2. Serialize call_indirect instruction with valid type and table index.\n // 3. 
Serialize call_indirect instruction with invalid table index without\n // Ref-Types proposal.\n\n WasmEdge::AST::Instruction Call(WasmEdge::OpCode::Call);\n WasmEdge::AST::Instruction CallIndirect(WasmEdge::OpCode::Call_indirect);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "meta": {"hash_id": "561d4d2386d4c768d9fe526500f566d3578c1de76184270db285b69e339391ae"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 18, "content": " Call.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {Call, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x10U, // OpCode Call.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Function type index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "fa7eb7a8bc842cf3db659f8faae524b5b9247d03e3437ef01e9ac750ef8876ba"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 19, "content": " CallIndirect.getTargetIndex() = 0xFFFFFFFFU;\n CallIndirect.getSourceIndex() = 0x05U;\n Instructions = {CallIndirect, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x11U, // OpCode Call_indirect.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Type index.\n 0x05U, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "3596c9eb875d69f9105ca4ea32cf723ad26863bd6d31ce35a3a31b591b55c605"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 20, "content": " EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeReferenceInstruction) {\n WasmEdge::Configure ConfNoRefType;\n ConfNoRefType.removeProposal(WasmEdge::Proposal::ReferenceTypes);\n WasmEdge::Loader::Serializer SerNoRefType(ConfNoRefType);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 6. Test reference instructions.\n //\n // 1. Serialize function reference type.\n // 2. 
Serialize invalid reference type without Ref-Types proposal.\n\n WasmEdge::AST::Instruction RefNull(WasmEdge::OpCode::Ref__null);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "meta": {"hash_id": "8cbc3ff222dc3a444fdf483e7ea1b9e728d9dcdc9b19e323690224442b005295"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 21, "content": " RefNull.setValType(WasmEdge::TypeCode::FuncRef);\n Instructions = {RefNull, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0xD0U, // OpCode Ref__null.\n 0x70U, // FuncRef\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "b4f683d22dce122631a89156ab86840fbee153907a9d13b0f835da8087e43cfd"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 22, "content": " RefNull.setValType(WasmEdge::TypeCode::ExternRef);\n Instructions = {RefNull, End};\n EXPECT_FALSE(\n SerNoRefType.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeParametricInstruction) {\n WasmEdge::Configure ConfNoSIMD;\n ConfNoSIMD.removeProposal(WasmEdge::Proposal::SIMD);\n WasmEdge::Loader::Serializer SerNoSIMD(ConfNoSIMD);\n\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 7. Test parametric instructions.\n //\n // 1. Serialize valid select_t instruction with value type list.\n // 2. Serialize invalid value type list without SIMD proposal.\n\n WasmEdge::AST::Instruction SelectT(WasmEdge::OpCode::Select_t);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "meta": {"hash_id": "b4fdd36f039c0c72b46150997556b3fcc195fa6f5128ce3b7c5d8359b4f0f784"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 23, "content": " SelectT.setValTypeListSize(2);\n SelectT.getValTypeList()[0] = WasmEdge::TypeCode::I32;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::I64;\n Instructions = {SelectT, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x1CU, // OpCode Select_t.\n 0x02U, // Vector length = 2\n 0x7FU, 0x7EU, // Value types\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "35b0b33b401487d746251254f7fb8b78802fe08a1fd2e2e6fce2548b81e21f74"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 24, "content": " SelectT.getValTypeList()[0] = WasmEdge::TypeCode::V128;\n SelectT.getValTypeList()[1] = WasmEdge::TypeCode::V128;\n Instructions = {SelectT, End};\n EXPECT_FALSE(SerNoSIMD.serializeSection(createCodeSec(Instructions), Output));\n}\n\nTEST(SerializeInstructionTest, SerializeVariableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 8. Test variable instructions.\n //\n // 1. 
Serialize valid local or global index.\n\n WasmEdge::AST::Instruction LocalGet(WasmEdge::OpCode::Local__get);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "meta": {"hash_id": "e89ece15998d03a15e8f5f5d43510cd4c7b1e8335b27294035d3932de17b8898"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 25, "content": " LocalGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {LocalGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x20U, // OpCode Local__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Local index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "meta": {"hash_id": "8b19b88699c15e201475db4ba44559d48c8fe926f93fa1c544a53ca4c32a7d0c"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 26, "content": "TEST(SerializeInstructionTest, SerializeTableInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 9. Test table instructions.\n //\n // 1. Serialize table_get instruction.\n // 2. Serialize table_init instruction.\n\n WasmEdge::AST::Instruction TableGet(WasmEdge::OpCode::Table__get);\n WasmEdge::AST::Instruction TableInit(WasmEdge::OpCode::Table__init);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "meta": {"hash_id": "52b4b9842d24bf7b08d12c9998396c1a55546f00e0cb8286ccffad4789c74d32"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 27, "content": " TableGet.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableGet, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x25U, // OpCode Table__get.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "ea09255eeb5d0481ef7381966e0842e345244e1f0395d01977d74c86928709cf"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 28, "content": " TableInit.getSourceIndex() = 0x05U;\n TableInit.getTargetIndex() = 0xFFFFFFFFU;\n Instructions = {TableInit, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0CU, // Content size = 12\n 0x01U, // Vector length = 1\n 0x0AU, // Code segment size = 10\n 0x00U, // Local vec(0)\n 0xFCU, 0x0CU, // OpCode Table__init.\n 0x05U, // Element idx.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Table index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "meta": {"hash_id": "dbfaea21209001acb6aa6d1722a799b8767c1d1b50fc5fc192309fad1c8ef35d"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 29, "content": "TEST(SerializeInstructionTest, SerializeMemoryInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 10. Test memory instructions.\n //\n // 1. Serialize memory_grow instruction.\n // 2. 
Serialize i32_load instruction.\n\n WasmEdge::AST::Instruction MemoryGrow(WasmEdge::OpCode::Memory__grow);\n WasmEdge::AST::Instruction I32Load(WasmEdge::OpCode::I32__load);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n Instructions = {MemoryGrow, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x06U, // Content size = 6\n 0x01U, // Vector length = 1\n 0x04U, // Code segment size = 4\n 0x00U, // Local vec(0)\n 0x40U, // OpCode Memory__grow.\n 0x00U, // Checking byte\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "b6fa2562d72e6659fd78a60a7f03cb49be967a1c7fa9550124e4d38138f53585"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 30, "content": " I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "c07d3fa3e0551f597026bd373b141ab27225aa070de8e6529a50a9c13be58713"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 31, "content": " I32Load.getMemoryAlign() = 0xFFFFFFFFU;\n I32Load.getMemoryOffset() = 0xFFFFFFFEU;\n Instructions = {I32Load, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0FU, // Content size = 15\n 0x01U, // Vector length = 1\n 0x0DU, // Code segment size = 13\n 0x00U, // Local vec(0)\n 0x28U, // OpCode I32__load.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Align.\n 0xFEU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Offset.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n\n", "meta": {"hash_id": "d5fba779ff745a0c7ef3b17056848f889fbdba678be7e39733b18de7b55a688e"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 32, "content": "TEST(SerializeInstructionTest, SerializeConstInstruction) {\n std::vector Expected;\n std::vector Output;\n std::vector Instructions;\n\n // 11. Test const numeric instructions.\n //\n // 1. Serialize I32 const numeric instruction.\n // 2. Serialize I64 const numeric instruction.\n // 3. Serialize F32 const numeric instruction.\n // 4. 
Serialize F64 const numeric instruction.\n\n WasmEdge::AST::Instruction I32Const(WasmEdge::OpCode::I32__const);\n WasmEdge::AST::Instruction I64Const(WasmEdge::OpCode::I64__const);\n WasmEdge::AST::Instruction F32Const(WasmEdge::OpCode::F32__const);\n WasmEdge::AST::Instruction F64Const(WasmEdge::OpCode::F64__const);\n WasmEdge::AST::Instruction End(WasmEdge::OpCode::End);\n\n", "meta": {"hash_id": "f8841d58a7135b37fc2f7241fe9d729e3009d925664bd0bd6f8ae3e77e65b284"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 33, "content": " I32Const.setNum(-123456);\n Instructions = {I32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x08U, // Content size = 8\n 0x01U, // Vector length = 1\n 0x06U, // Code segment size = 6\n 0x00U, // Local vec(0)\n 0x41U, // OpCode I32__const.\n 0xC0U, 0xBBU, 0x78U, // I32 -123456.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "8b79b4c0b169da1e34373d52da43714150f20786156f8bbda6373371a756e46d"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 34, "content": " I64Const.setNum(static_cast(-112233445566L));\n Instructions = {I64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0BU, // Content size = 11\n 0x01U, // Vector length = 1\n 0x09U, // Code segment size = 9\n 0x00U, // Local vec(0)\n 0x42U, // OpCode I64__const.\n 0xC2U, 0x8EU, 0xF6U, 0xF2U, 0xDDU, 0x7CU, // I64 -112233445566\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "65d493e261555d79c7893b3c178a8d557000b6f67bb1ea6edaf2f3a889708683"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 35, "content": " F32Const.setNum(static_cast(-0x1.921fb4p+1)); // -3.1415926F\n Instructions = {F32Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x09U, // Content size = 9\n 0x01U, // Vector length = 1\n 0x07U, // Code segment size = 7\n 0x00U, // Local vec(0)\n 0x43U, // OpCode F32__const.\n 0xDAU, 0x0FU, 0x49U, 0xC0U, // F32 -3.1415926\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "3e7b5e3152055cdc650db5a5d8602c7f8de49150b7fee51a4d0e06665d6a3fd3"}}, {"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 36, "content": " F64Const.setNum(-3.1415926535897932);\n Instructions = {F64Const, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0DU, // Content size = 13\n 0x01U, // Vector length = 1\n 0x0BU, // Code segment size = 11\n 0x00U, // Local vec(0)\n 0x44U, // OpCode F64__const.\n 0x18U, 0x2DU, 0x44U, 0x54U,\n 0xFBU, 0x21U, 0x09U, 0xC0U, // F64 -3.1415926535897932\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n}\n} // namespace\n", "meta": {"hash_id": "c85a6c85800d206298c857f3e31af0dc959ca2e73dac5d9848849c85606793be"}}]}], "golden_chunks": [{"doc_uuid": "0de15e15952dddf36ca2fc607967a2513a2d7ccf59b95710f1091d5b53819238", "index": 12, "content": " Br.getJump().TargetIndex = 0xFFFFFFFFU;\n Instructions = {Br, End};\n Output = {};\n EXPECT_TRUE(Ser.serializeSection(createCodeSec(Instructions), Output));\n Expected = {\n 0x0AU, // Code section\n 0x0AU, // 
Content size = 10\n 0x01U, // Vector length = 1\n 0x08U, // Code segment size = 8\n 0x00U, // Local vec(0)\n 0x0CU, // OpCode Br.\n 0xFFU, 0xFFU, 0xFFU, 0xFFU, 0x0FU, // Label index.\n 0x0BU // Expression End.\n };\n EXPECT_EQ(Output, Expected);\n\n", "meta": {"hash_id": "52d5508308dce755f68b18ec3d7dc960d36c112d6d5b48d25b809d79876fba17"}}], "meta": {"doctype": "codebase", "relative_path": "/test/loader/serializeInstructionTest.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 37, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `Log` class?", "answer": "The `Log` class is a host function, as indicated by its inheritance from `Runtime::HostFunction`. It defines a `body` method that takes several `uint32_t` parameters and returns an `Expect`. Inside the `body` method, it calls `printPluginMock` with the string literal `\"wasi-logging\"` and returns `Unexpect(ErrCode::Value::HostFuncError)`. This suggests that the `Log` class is a mock implementation of a logging functionality in the \"wasi-logging\" plugin.", "golden_doc_uuids": ["adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823"], "golden_chunk_uuids": [["adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823", 0]], "golden_documents": [{"uuid": "adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823", "content": "#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiLoggingMock {\n\nusing namespace std::literals;\n\nclass Log : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"wasi-logging\"sv);\n return Unexpect(ErrCode::Value::HostFuncError);\n }\n};\n\n} // namespace WasiLoggingMock\n} // namespace Host\n} // namespace WasmEdge", "meta": {"doctype": "codebase", "relative_path": "/include/host/mock/wasi_logging_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823", "index": 0, "content": "#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiLoggingMock {\n\nusing namespace std::literals;\n\nclass Log : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"wasi-logging\"sv);\n return Unexpect(ErrCode::Value::HostFuncError);\n }\n};\n\n} // namespace WasiLoggingMock\n} // namespace Host\n} // namespace WasmEdge", "meta": {"hash_id": "9dd8e8fc28fc298b84bd1c681dd2fa969c2634ef670d1e6c41d4c9214b95c176"}}]}], "golden_chunks": [{"doc_uuid": "adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823", "index": 0, "content": "#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiLoggingMock {\n\nusing namespace std::literals;\n\nclass Log : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"wasi-logging\"sv);\n return Unexpect(ErrCode::Value::HostFuncError);\n }\n};\n\n} // namespace WasiLoggingMock\n} 
// namespace Host\n} // namespace WasmEdge", "meta": {"hash_id": "9dd8e8fc28fc298b84bd1c681dd2fa969c2634ef670d1e6c41d4c9214b95c176"}}], "meta": {"doctype": "codebase", "relative_path": "/include/host/mock/wasi_logging_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What is the expected behavior of the `body` method of the `Log` class?", "answer": "Based on the implementation, the `body` method of the `Log` class is expected to print a mock message using `printPluginMock` with the string `\"wasi-logging\"`. It then returns an `Unexpect` object containing the error code `ErrCode::Value::HostFuncError`. This suggests that the `body` method is a mock implementation and does not perform actual logging functionality.", "golden_doc_uuids": ["adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823"], "golden_chunk_uuids": [["adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823", 0]], "golden_documents": [{"uuid": "adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823", "content": "#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiLoggingMock {\n\nusing namespace std::literals;\n\nclass Log : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"wasi-logging\"sv);\n return Unexpect(ErrCode::Value::HostFuncError);\n }\n};\n\n} // namespace WasiLoggingMock\n} // namespace Host\n} // namespace WasmEdge", "meta": {"doctype": "codebase", "relative_path": "/include/host/mock/wasi_logging_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823", "index": 0, "content": "#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiLoggingMock {\n\nusing namespace std::literals;\n\nclass Log : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"wasi-logging\"sv);\n return Unexpect(ErrCode::Value::HostFuncError);\n }\n};\n\n} // namespace WasiLoggingMock\n} // namespace Host\n} // namespace WasmEdge", "meta": {"hash_id": "9dd8e8fc28fc298b84bd1c681dd2fa969c2634ef670d1e6c41d4c9214b95c176"}}]}], "golden_chunks": [{"doc_uuid": "adf8199b3afd25d13810e101d05c538903c40967c595241fa0af4553cb5ef823", "index": 0, "content": "#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiLoggingMock {\n\nusing namespace std::literals;\n\nclass Log : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"wasi-logging\"sv);\n return Unexpect(ErrCode::Value::HostFuncError);\n }\n};\n\n} // namespace WasiLoggingMock\n} // namespace Host\n} // namespace WasmEdge", "meta": {"hash_id": "9dd8e8fc28fc298b84bd1c681dd2fa969c2634ef670d1e6c41d4c9214b95c176"}}], "meta": {"doctype": "codebase", "relative_path": "/include/host/mock/wasi_logging_func.h", "repo_name": "WasmEdge/WasmEdge", 
"num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `procRaise` function in the `Environ` class?", "answer": "The `procRaise` function is responsible for raising a signal specified by the `__wasi_signal_t` parameter. It maps the WASI signal to the corresponding system signal using a switch statement, and then calls the `std::raise` function to raise the signal. If the signal is not supported, it returns a `WasiUnexpect` with the `__WASI_ERRNO_NOTSUP` error code.", "golden_doc_uuids": ["1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84"], "golden_chunk_uuids": [["1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", 0]], "golden_documents": [{"uuid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"common/defines.h\"\n#if WASMEDGE_OS_MACOS\n\n#include \"common/errcode.h\"\n#include \"host/wasi/environ.h\"\n#include \"macos.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WASI {\n\nWasiExpect Environ::procRaise(__wasi_signal_t Signal) const noexcept {\n int SysSignal;\n switch (Signal) {\n case __WASI_SIGNAL_NONE:\n SysSignal = 0;\n break;\n case __WASI_SIGNAL_HUP:\n SysSignal = SIGHUP;\n break;\n case __WASI_SIGNAL_INT:\n SysSignal = SIGINT;\n break;\n case __WASI_SIGNAL_QUIT:\n SysSignal = SIGQUIT;\n break;\n case __WASI_SIGNAL_ILL:\n SysSignal = SIGILL;\n break;\n case __WASI_SIGNAL_TRAP:\n SysSignal = SIGTRAP;\n break;\n case __WASI_SIGNAL_ABRT:\n SysSignal = SIGABRT;\n break;\n case __WASI_SIGNAL_BUS:\n SysSignal = SIGBUS;\n break;\n case __WASI_SIGNAL_FPE:\n SysSignal = SIGFPE;\n break;\n case __WASI_SIGNAL_KILL:\n SysSignal = SIGKILL;\n break;\n case __WASI_SIGNAL_USR1:\n SysSignal = SIGUSR1;\n break;\n case __WASI_SIGNAL_SEGV:\n SysSignal = SIGSEGV;\n break;\n case __WASI_SIGNAL_USR2:\n SysSignal = SIGUSR2;\n break;\n case __WASI_SIGNAL_PIPE:\n SysSignal = SIGPIPE;\n break;\n case __WASI_SIGNAL_ALRM:\n SysSignal = SIGALRM;\n break;\n case __WASI_SIGNAL_TERM:\n SysSignal = SIGTERM;\n break;\n case __WASI_SIGNAL_CHLD:\n SysSignal = SIGCHLD;\n break;\n case __WASI_SIGNAL_CONT:\n SysSignal = SIGCONT;\n break;\n case __WASI_SIGNAL_STOP:\n SysSignal = SIGSTOP;\n break;\n case __WASI_SIGNAL_TSTP:\n SysSignal = SIGTSTP;\n break;\n case __WASI_SIGNAL_TTIN:\n SysSignal = SIGTTIN;\n break;\n case __WASI_SIGNAL_TTOU:\n SysSignal = SIGTTOU;\n break;\n case __WASI_SIGNAL_URG:\n SysSignal = SIGURG;\n break;\n case __WASI_SIGNAL_XCPU:\n SysSignal = SIGXCPU;\n break;\n case __WASI_SIGNAL_XFSZ:\n SysSignal = SIGXFSZ;\n break;\n case __WASI_SIGNAL_VTALRM:\n SysSignal = SIGVTALRM;\n break;\n case __WASI_SIGNAL_PROF:\n SysSignal = SIGPROF;\n break;\n case __WASI_SIGNAL_WINCH:\n SysSignal = SIGWINCH;\n break;\n case __WASI_SIGNAL_SYS:\n SysSignal = SIGSYS;\n break;\n case __WASI_SIGNAL_POLL:\n case __WASI_SIGNAL_PWR:\n default:\n return WasiUnexpect(__WASI_ERRNO_NOTSUP);\n }\n if (auto Res = std::raise(SysSignal); Res != 0) {\n return WasiUnexpect(fromErrNo(errno));\n }\n return {};\n}\n\nWasiExpect Environ::schedYield() const noexcept {\n ::sched_yield();\n return {};\n}\n\n} // namespace WASI\n} // namespace Host\n} // namespace WasmEdge\n\n#endif\n", "meta": {"doctype": "codebase", "relative_path": "/lib/host/wasi/environ-macos.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": 
"1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"common/defines.h\"\n#if WASMEDGE_OS_MACOS\n\n#include \"common/errcode.h\"\n#include \"host/wasi/environ.h\"\n#include \"macos.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WASI {\n\nWasiExpect Environ::procRaise(__wasi_signal_t Signal) const noexcept {\n int SysSignal;\n switch (Signal) {\n case __WASI_SIGNAL_NONE:\n SysSignal = 0;\n break;\n case __WASI_SIGNAL_HUP:\n SysSignal = SIGHUP;\n break;\n case __WASI_SIGNAL_INT:\n SysSignal = SIGINT;\n break;\n case __WASI_SIGNAL_QUIT:\n SysSignal = SIGQUIT;\n break;\n case __WASI_SIGNAL_ILL:\n SysSignal = SIGILL;\n break;\n case __WASI_SIGNAL_TRAP:\n SysSignal = SIGTRAP;\n break;\n case __WASI_SIGNAL_ABRT:\n SysSignal = SIGABRT;\n break;\n case __WASI_SIGNAL_BUS:\n SysSignal = SIGBUS;\n break;\n case __WASI_SIGNAL_FPE:\n SysSignal = SIGFPE;\n break;\n", "meta": {"hash_id": "db98fa11a37fb86546589ddd821da22e564566a9d5359ebeb829191430a57434"}}, {"doc_uuid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", "index": 1, "content": " case __WASI_SIGNAL_KILL:\n SysSignal = SIGKILL;\n break;\n case __WASI_SIGNAL_USR1:\n SysSignal = SIGUSR1;\n break;\n case __WASI_SIGNAL_SEGV:\n SysSignal = SIGSEGV;\n break;\n case __WASI_SIGNAL_USR2:\n SysSignal = SIGUSR2;\n break;\n case __WASI_SIGNAL_PIPE:\n SysSignal = SIGPIPE;\n break;\n case __WASI_SIGNAL_ALRM:\n SysSignal = SIGALRM;\n break;\n case __WASI_SIGNAL_TERM:\n SysSignal = SIGTERM;\n break;\n case __WASI_SIGNAL_CHLD:\n SysSignal = SIGCHLD;\n break;\n case __WASI_SIGNAL_CONT:\n SysSignal = SIGCONT;\n break;\n case __WASI_SIGNAL_STOP:\n SysSignal = SIGSTOP;\n break;\n case __WASI_SIGNAL_TSTP:\n SysSignal = SIGTSTP;\n break;\n case __WASI_SIGNAL_TTIN:\n SysSignal = SIGTTIN;\n break;\n case __WASI_SIGNAL_TTOU:\n SysSignal = SIGTTOU;\n break;\n case __WASI_SIGNAL_URG:\n SysSignal = SIGURG;\n break;\n case __WASI_SIGNAL_XCPU:\n SysSignal = SIGXCPU;\n break;\n", "meta": {"hash_id": "b914df28a2296ca639d08d870242a65e551afbb6269b52b7dd3370e0fc7ec3a5"}}, {"doc_uuid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", "index": 2, "content": " case __WASI_SIGNAL_XFSZ:\n SysSignal = SIGXFSZ;\n break;\n case __WASI_SIGNAL_VTALRM:\n SysSignal = SIGVTALRM;\n break;\n case __WASI_SIGNAL_PROF:\n SysSignal = SIGPROF;\n break;\n case __WASI_SIGNAL_WINCH:\n SysSignal = SIGWINCH;\n break;\n case __WASI_SIGNAL_SYS:\n SysSignal = SIGSYS;\n break;\n case __WASI_SIGNAL_POLL:\n case __WASI_SIGNAL_PWR:\n default:\n return WasiUnexpect(__WASI_ERRNO_NOTSUP);\n }\n if (auto Res = std::raise(SysSignal); Res != 0) {\n return WasiUnexpect(fromErrNo(errno));\n }\n return {};\n}\n\nWasiExpect Environ::schedYield() const noexcept {\n ::sched_yield();\n return {};\n}\n\n} // namespace WASI\n} // namespace Host\n} // namespace WasmEdge\n\n#endif\n", "meta": {"hash_id": "4ddf9d0bf6bfca31cc42595dd377ee0c09458e146472b4ec9a7ef974f106b884"}}]}], "golden_chunks": [{"doc_uuid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"common/defines.h\"\n#if WASMEDGE_OS_MACOS\n\n#include \"common/errcode.h\"\n#include \"host/wasi/environ.h\"\n#include \"macos.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WASI {\n\nWasiExpect Environ::procRaise(__wasi_signal_t 
Signal) const noexcept {\n int SysSignal;\n switch (Signal) {\n case __WASI_SIGNAL_NONE:\n SysSignal = 0;\n break;\n case __WASI_SIGNAL_HUP:\n SysSignal = SIGHUP;\n break;\n case __WASI_SIGNAL_INT:\n SysSignal = SIGINT;\n break;\n case __WASI_SIGNAL_QUIT:\n SysSignal = SIGQUIT;\n break;\n case __WASI_SIGNAL_ILL:\n SysSignal = SIGILL;\n break;\n case __WASI_SIGNAL_TRAP:\n SysSignal = SIGTRAP;\n break;\n case __WASI_SIGNAL_ABRT:\n SysSignal = SIGABRT;\n break;\n case __WASI_SIGNAL_BUS:\n SysSignal = SIGBUS;\n break;\n case __WASI_SIGNAL_FPE:\n SysSignal = SIGFPE;\n break;\n", "meta": {"hash_id": "db98fa11a37fb86546589ddd821da22e564566a9d5359ebeb829191430a57434"}}], "meta": {"doctype": "codebase", "relative_path": "/lib/host/wasi/environ-macos.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How does the `procRaise` function handle unsupported signals?", "answer": "When the `procRaise` function encounters an unsupported signal (i.e., `__WASI_SIGNAL_POLL`, `__WASI_SIGNAL_PWR`, or any other unknown signal), it returns a `WasiUnexpect` object with the `__WASI_ERRNO_NOTSUP` error code, indicating that the signal is not supported. This can be seen in the following code snippet:\n\ncase __WASI_SIGNAL_POLL:\ncase __WASI_SIGNAL_PWR:\ndefault:\n return WasiUnexpect(__WASI_ERRNO_NOTSUP);\n", "golden_doc_uuids": ["1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84"], "golden_chunk_uuids": [["1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", 2]], "golden_documents": [{"uuid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"common/defines.h\"\n#if WASMEDGE_OS_MACOS\n\n#include \"common/errcode.h\"\n#include \"host/wasi/environ.h\"\n#include \"macos.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WASI {\n\nWasiExpect Environ::procRaise(__wasi_signal_t Signal) const noexcept {\n int SysSignal;\n switch (Signal) {\n case __WASI_SIGNAL_NONE:\n SysSignal = 0;\n break;\n case __WASI_SIGNAL_HUP:\n SysSignal = SIGHUP;\n break;\n case __WASI_SIGNAL_INT:\n SysSignal = SIGINT;\n break;\n case __WASI_SIGNAL_QUIT:\n SysSignal = SIGQUIT;\n break;\n case __WASI_SIGNAL_ILL:\n SysSignal = SIGILL;\n break;\n case __WASI_SIGNAL_TRAP:\n SysSignal = SIGTRAP;\n break;\n case __WASI_SIGNAL_ABRT:\n SysSignal = SIGABRT;\n break;\n case __WASI_SIGNAL_BUS:\n SysSignal = SIGBUS;\n break;\n case __WASI_SIGNAL_FPE:\n SysSignal = SIGFPE;\n break;\n case __WASI_SIGNAL_KILL:\n SysSignal = SIGKILL;\n break;\n case __WASI_SIGNAL_USR1:\n SysSignal = SIGUSR1;\n break;\n case __WASI_SIGNAL_SEGV:\n SysSignal = SIGSEGV;\n break;\n case __WASI_SIGNAL_USR2:\n SysSignal = SIGUSR2;\n break;\n case __WASI_SIGNAL_PIPE:\n SysSignal = SIGPIPE;\n break;\n case __WASI_SIGNAL_ALRM:\n SysSignal = SIGALRM;\n break;\n case __WASI_SIGNAL_TERM:\n SysSignal = SIGTERM;\n break;\n case __WASI_SIGNAL_CHLD:\n SysSignal = SIGCHLD;\n break;\n case __WASI_SIGNAL_CONT:\n SysSignal = SIGCONT;\n break;\n case __WASI_SIGNAL_STOP:\n SysSignal = SIGSTOP;\n break;\n case __WASI_SIGNAL_TSTP:\n SysSignal = SIGTSTP;\n break;\n case __WASI_SIGNAL_TTIN:\n SysSignal = SIGTTIN;\n break;\n case __WASI_SIGNAL_TTOU:\n SysSignal = SIGTTOU;\n break;\n case __WASI_SIGNAL_URG:\n SysSignal = SIGURG;\n break;\n case __WASI_SIGNAL_XCPU:\n SysSignal = SIGXCPU;\n break;\n case __WASI_SIGNAL_XFSZ:\n SysSignal = SIGXFSZ;\n break;\n case __WASI_SIGNAL_VTALRM:\n SysSignal 
= SIGVTALRM;\n break;\n case __WASI_SIGNAL_PROF:\n SysSignal = SIGPROF;\n break;\n case __WASI_SIGNAL_WINCH:\n SysSignal = SIGWINCH;\n break;\n case __WASI_SIGNAL_SYS:\n SysSignal = SIGSYS;\n break;\n case __WASI_SIGNAL_POLL:\n case __WASI_SIGNAL_PWR:\n default:\n return WasiUnexpect(__WASI_ERRNO_NOTSUP);\n }\n if (auto Res = std::raise(SysSignal); Res != 0) {\n return WasiUnexpect(fromErrNo(errno));\n }\n return {};\n}\n\nWasiExpect Environ::schedYield() const noexcept {\n ::sched_yield();\n return {};\n}\n\n} // namespace WASI\n} // namespace Host\n} // namespace WasmEdge\n\n#endif\n", "meta": {"doctype": "codebase", "relative_path": "/lib/host/wasi/environ-macos.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"common/defines.h\"\n#if WASMEDGE_OS_MACOS\n\n#include \"common/errcode.h\"\n#include \"host/wasi/environ.h\"\n#include \"macos.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WASI {\n\nWasiExpect Environ::procRaise(__wasi_signal_t Signal) const noexcept {\n int SysSignal;\n switch (Signal) {\n case __WASI_SIGNAL_NONE:\n SysSignal = 0;\n break;\n case __WASI_SIGNAL_HUP:\n SysSignal = SIGHUP;\n break;\n case __WASI_SIGNAL_INT:\n SysSignal = SIGINT;\n break;\n case __WASI_SIGNAL_QUIT:\n SysSignal = SIGQUIT;\n break;\n case __WASI_SIGNAL_ILL:\n SysSignal = SIGILL;\n break;\n case __WASI_SIGNAL_TRAP:\n SysSignal = SIGTRAP;\n break;\n case __WASI_SIGNAL_ABRT:\n SysSignal = SIGABRT;\n break;\n case __WASI_SIGNAL_BUS:\n SysSignal = SIGBUS;\n break;\n case __WASI_SIGNAL_FPE:\n SysSignal = SIGFPE;\n break;\n", "meta": {"hash_id": "db98fa11a37fb86546589ddd821da22e564566a9d5359ebeb829191430a57434"}}, {"doc_uuid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", "index": 1, "content": " case __WASI_SIGNAL_KILL:\n SysSignal = SIGKILL;\n break;\n case __WASI_SIGNAL_USR1:\n SysSignal = SIGUSR1;\n break;\n case __WASI_SIGNAL_SEGV:\n SysSignal = SIGSEGV;\n break;\n case __WASI_SIGNAL_USR2:\n SysSignal = SIGUSR2;\n break;\n case __WASI_SIGNAL_PIPE:\n SysSignal = SIGPIPE;\n break;\n case __WASI_SIGNAL_ALRM:\n SysSignal = SIGALRM;\n break;\n case __WASI_SIGNAL_TERM:\n SysSignal = SIGTERM;\n break;\n case __WASI_SIGNAL_CHLD:\n SysSignal = SIGCHLD;\n break;\n case __WASI_SIGNAL_CONT:\n SysSignal = SIGCONT;\n break;\n case __WASI_SIGNAL_STOP:\n SysSignal = SIGSTOP;\n break;\n case __WASI_SIGNAL_TSTP:\n SysSignal = SIGTSTP;\n break;\n case __WASI_SIGNAL_TTIN:\n SysSignal = SIGTTIN;\n break;\n case __WASI_SIGNAL_TTOU:\n SysSignal = SIGTTOU;\n break;\n case __WASI_SIGNAL_URG:\n SysSignal = SIGURG;\n break;\n case __WASI_SIGNAL_XCPU:\n SysSignal = SIGXCPU;\n break;\n", "meta": {"hash_id": "b914df28a2296ca639d08d870242a65e551afbb6269b52b7dd3370e0fc7ec3a5"}}, {"doc_uuid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", "index": 2, "content": " case __WASI_SIGNAL_XFSZ:\n SysSignal = SIGXFSZ;\n break;\n case __WASI_SIGNAL_VTALRM:\n SysSignal = SIGVTALRM;\n break;\n case __WASI_SIGNAL_PROF:\n SysSignal = SIGPROF;\n break;\n case __WASI_SIGNAL_WINCH:\n SysSignal = SIGWINCH;\n break;\n case __WASI_SIGNAL_SYS:\n SysSignal = SIGSYS;\n break;\n case __WASI_SIGNAL_POLL:\n case __WASI_SIGNAL_PWR:\n default:\n return WasiUnexpect(__WASI_ERRNO_NOTSUP);\n }\n if (auto Res = std::raise(SysSignal); Res != 0) {\n return 
WasiUnexpect(fromErrNo(errno));\n }\n return {};\n}\n\nWasiExpect Environ::schedYield() const noexcept {\n ::sched_yield();\n return {};\n}\n\n} // namespace WASI\n} // namespace Host\n} // namespace WasmEdge\n\n#endif\n", "meta": {"hash_id": "4ddf9d0bf6bfca31cc42595dd377ee0c09458e146472b4ec9a7ef974f106b884"}}]}], "golden_chunks": [{"doc_uuid": "1aebf7d6ad261452293b74e53b8b7afd8c068e3a8e3d2b7216a21055d2ac9a84", "index": 2, "content": " case __WASI_SIGNAL_XFSZ:\n SysSignal = SIGXFSZ;\n break;\n case __WASI_SIGNAL_VTALRM:\n SysSignal = SIGVTALRM;\n break;\n case __WASI_SIGNAL_PROF:\n SysSignal = SIGPROF;\n break;\n case __WASI_SIGNAL_WINCH:\n SysSignal = SIGWINCH;\n break;\n case __WASI_SIGNAL_SYS:\n SysSignal = SIGSYS;\n break;\n case __WASI_SIGNAL_POLL:\n case __WASI_SIGNAL_PWR:\n default:\n return WasiUnexpect(__WASI_ERRNO_NOTSUP);\n }\n if (auto Res = std::raise(SysSignal); Res != 0) {\n return WasiUnexpect(fromErrNo(errno));\n }\n return {};\n}\n\nWasiExpect Environ::schedYield() const noexcept {\n ::sched_yield();\n return {};\n}\n\n} // namespace WASI\n} // namespace Host\n} // namespace WasmEdge\n\n#endif\n", "meta": {"hash_id": "4ddf9d0bf6bfca31cc42595dd377ee0c09458e146472b4ec9a7ef974f106b884"}}], "meta": {"doctype": "codebase", "relative_path": "/lib/host/wasi/environ-macos.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `printPluginMock` function?", "answer": "The `printPluginMock` function is called in the `body` methods of all the mock classes in the `WasiCryptoMock` namespace. It takes a string literal \"WASI-Crypto\" as an argument. Based on the name, it likely prints a message indicating that a mock implementation of a WASI-Crypto function is being used. For example, in the `KeypairGenerate` class:\n\nExpect body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n}\n", "golden_doc_uuids": ["9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582"], "golden_chunk_uuids": [["9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", 0]], "golden_documents": [{"uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCryptoMock {\n\nusing namespace std::literals;\nstatic inline constexpr const uint32_t kWASICryptoError = 1U;\n\nnamespace Common {\nclass ArrayOutputLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass ArrayOutputPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass 
OptionsSet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSetGuestBuffer\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerInvalidate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Common\n\nnamespace AsymmetricCommon {\nclass KeypairGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairGenerateManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairReplaceManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairFromPkAndSk : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairPublickey : public 
Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairSecretkey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyFromSecretkey\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace AsymmetricCommon\n\nnamespace Kx {\nclass Dh : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Encapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Decapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Kx\n\nnamespace Signatures {\nclass Export : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Import : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOpen : public 
Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateUpdate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSign : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateOpen\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateUpdate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateVerify\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateClose\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Close : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n} // namespace Signatures\n\nnamespace Symmetric {\nclass KeyGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyGenerateManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyReplaceManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, 
uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClone : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOptionsGet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOptionsGetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateAbsorb : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueeze : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueezeTag : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueezeKey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateMaxTagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateEncrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateEncryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateDecrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateDecryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateRatchet : public 
Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Symmetric\n\n} // namespace WasiCryptoMock\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/include/host/mock/wasi_crypto_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 33, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCryptoMock {\n\nusing namespace std::literals;\nstatic inline constexpr const uint32_t kWASICryptoError = 1U;\n\nnamespace Common {\nclass ArrayOutputLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass ArrayOutputPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "eddf3231983ad72c3f49aaff2b388eaf21f103c2b9e9fbd79c675da503bb0d92"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 1, "content": "class OptionsOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "effbe057769f32b9264ef50474dc41a221e25f9bdcd409aae6ed50b99d779747"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 2, "content": "class OptionsSet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": 
"39dafe4e4109d2962545b6320f9626656ce68fc61a93c9b71666d0d633acdd69"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 3, "content": "class OptionsSetGuestBuffer\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "1882262848695f4737e4b1a446695cca2c7ea771fadf7a9d8b70cb8e50afeab3"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 4, "content": "class SecretsManagerInvalidate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Common\n\nnamespace AsymmetricCommon {\nclass KeypairGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "a68fa5c0f8e17d495e03964535f3ff2b2659f8d9fbb250f5b172435d329ba1be"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 5, "content": "class KeypairGenerateManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "7628fca1d0b604b6d42f18ec7ae8f6ad426d68e85ee3f7128b9e6fb53715b1db"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 6, "content": "class KeypairReplaceManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "9ecf1a6f41bd0d0bdb893b95d5cd9b7d172232afd897677ca4f82445af119b48"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 7, "content": "class KeypairFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n 
printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairFromPkAndSk : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "359c6755299beaff7c7e93cb459b577f115e7b6fb8d6b66554bd4649146fae24"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 8, "content": "class KeypairExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairPublickey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "095db419aaecf76f02234b499316c8f13c0bdff45f30f8d151785ebfa120318c"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 9, "content": "class KeypairSecretkey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "14dec474f9a5671562c187abd8b0d2c95e8b5c6a3e201ac4459fdfc2c2ef05f2"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 10, "content": "class PublickeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "fec16208c07280ade429461a9d8bd288d40b4cef1141b179a8bdd799db548cd7"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 11, "content": "class PublickeyFromSecretkey\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "a9968f085a2b4653f38df5ad92250f582ab3e5d049c1b3959c5da0169c35e26c"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 12, "content": "class SecretkeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const 
Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "87a74f274bdd57f9157065558af65322d035f0678993ed6273fcb78bcf9c85ca"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 13, "content": "class SecretkeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace AsymmetricCommon\n\nnamespace Kx {\nclass Dh : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "4fee4c7c058774d502aad9ed547c797fbc5e2015aefca823aa62788c8a42b421"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 14, "content": "class Encapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Decapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Kx\n\n", "meta": {"hash_id": "e576a657b46206f45950ee9dea287a415af7b0c5d6709e2763b01884f52fdf60"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 15, "content": "namespace Signatures {\nclass Export : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Import : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "fd8e0dcf9c24fa9ed2dcab62c6c0c5e09d8e72f06306e1bceeeec154b3e8b879"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 16, "content": "class StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateUpdate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "c3f5a79fbf77f7728920e7fa096da21ad05837e8a3e59695fe990937dcbbae95"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 17, "content": "class StateSign : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "c552bc1099290eddb55308c82b578f615ac79df07a0ee2497c42b3f3eb466aa9"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 18, "content": "class VerificationStateOpen\n : public 
Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateUpdate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "427c878f2ef7c38bfc0df2cd759507f15affed89e3968c798faa28c71798987d"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 19, "content": "class VerificationStateVerify\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateClose\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Close : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n} // namespace Signatures\n\n", "meta": {"hash_id": "28ffbc6ff4c957cd618dcc929907e88c7d804e3cd83aa9eef17fcc554cb63bb2"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 20, "content": "namespace Symmetric {\nclass KeyGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "b4f5872cc3eeb086f86d2708606c92952a131254d86cb50352d7a18ec59bb5e0"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 21, "content": "class KeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyGenerateManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "42f808a72f5031a1b95202b4db63085c9065d7dca58399d2706eb18bc2c2a58b"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 22, "content": "class KeyStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyReplaceManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "93824f89112e13f82d0da060edccacec404e0cc1d431c3c66355180c90cb1a65"}}, {"doc_uuid": 
"9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 23, "content": "class KeyId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "a39fa6bccad5af8c3dabd83c21eda3c16f8685452aa064b9372791b9e96f4605"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 24, "content": "class StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClone : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "a4d9c0d1a9c27bf46b98450e996710be81443f608ba750000700641dc88de69f"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 25, "content": "class StateOptionsGet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOptionsGetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "32558fe7dbfbe7a0cb64465e728edd08e227dd56c7004f6c34ece55be3528b9c"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 26, "content": "class StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateAbsorb : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "2a4a99f50d693ac31b5e0200e57d0b61212a4188db1f07af4a0030e81d9a74f5"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 27, "content": "class StateSqueeze : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueezeTag : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "19e8f038b96f4db488c61f29b9026181d5d12fb14cf7db9e48d94ffc2339b895"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 28, "content": "class StateSqueezeKey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateMaxTagLen : public 
Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "6b7473ae31458e0ac158ab29b1693029231e6c24eb515672de5551e8919b371e"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 29, "content": "class StateEncrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateEncryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "85f02951760940c748103194b8aa1783c18be482ad3969413a0a240cde3bc7c4"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 30, "content": "class StateDecrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateDecryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "36ef288444d3e9a31b57494f550b889a2954b4da547271c3d2f4398e6b9cf261"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 31, "content": "class StateRatchet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "2e0513a1c1a4632cad0fa2a4a923f85402c9ae6621bdbbd8b4f9c6e1fa6e392a"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 32, "content": "class TagPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Symmetric\n\n} // namespace WasiCryptoMock\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "df1e52b31454061f0043b08e90276cb5d7605479b14d88e0c3659a91dffcd445"}}]}], "golden_chunks": [{"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include 
\"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCryptoMock {\n\nusing namespace std::literals;\nstatic inline constexpr const uint32_t kWASICryptoError = 1U;\n\nnamespace Common {\nclass ArrayOutputLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass ArrayOutputPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "eddf3231983ad72c3f49aaff2b388eaf21f103c2b9e9fbd79c675da503bb0d92"}}], "meta": {"doctype": "codebase", "relative_path": "/include/host/mock/wasi_crypto_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 33, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `body` method in the mock classes?", "answer": "The `body` method in each mock class represents the actual implementation of the corresponding WASI-Crypto function. It takes a `Runtime::CallingFrame` reference and a set of arguments depending on the specific function being mocked. In all cases, the `body` method calls `printPluginMock` with \"WASI-Crypto\" as the argument and returns `kWASICryptoError`. For example, in the `KeypairImport` class:\n\nExpect body(const Runtime::CallingFrame &, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n}\n", "golden_doc_uuids": ["9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582"], "golden_chunk_uuids": [["9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", 0]], "golden_documents": [{"uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCryptoMock {\n\nusing namespace std::literals;\nstatic inline constexpr const uint32_t kWASICryptoError = 1U;\n\nnamespace Common {\nclass ArrayOutputLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass ArrayOutputPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n 
printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSetGuestBuffer\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerInvalidate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Common\n\nnamespace AsymmetricCommon {\nclass KeypairGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairGenerateManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairReplaceManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairFromPkAndSk : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairPublickey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairSecretkey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n 
}\n};\n\nclass KeypairClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyFromSecretkey\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace AsymmetricCommon\n\nnamespace Kx {\nclass Dh : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Encapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Decapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Kx\n\nnamespace Signatures {\nclass Export : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Import : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateUpdate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n 
}\n};\n\nclass StateSign : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateOpen\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateUpdate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateVerify\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateClose\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Close : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n} // namespace Signatures\n\nnamespace Symmetric {\nclass KeyGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyGenerateManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyReplaceManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOpen : public Runtime::HostFunction 
{\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClone : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOptionsGet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOptionsGetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateAbsorb : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueeze : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueezeTag : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueezeKey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateMaxTagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateEncrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateEncryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateDecrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateDecryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateRatchet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagPull : 
public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Symmetric\n\n} // namespace WasiCryptoMock\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/include/host/mock/wasi_crypto_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 33, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCryptoMock {\n\nusing namespace std::literals;\nstatic inline constexpr const uint32_t kWASICryptoError = 1U;\n\nnamespace Common {\nclass ArrayOutputLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass ArrayOutputPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "eddf3231983ad72c3f49aaff2b388eaf21f103c2b9e9fbd79c675da503bb0d92"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 1, "content": "class OptionsOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "effbe057769f32b9264ef50474dc41a221e25f9bdcd409aae6ed50b99d779747"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 2, "content": "class OptionsSet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass OptionsSetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "39dafe4e4109d2962545b6320f9626656ce68fc61a93c9b71666d0d633acdd69"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 3, "content": "class OptionsSetGuestBuffer\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n 
}\n};\n\nclass SecretsManagerOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretsManagerClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "1882262848695f4737e4b1a446695cca2c7ea771fadf7a9d8b70cb8e50afeab3"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 4, "content": "class SecretsManagerInvalidate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Common\n\nnamespace AsymmetricCommon {\nclass KeypairGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "a68fa5c0f8e17d495e03964535f3ff2b2659f8d9fbb250f5b172435d329ba1be"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 5, "content": "class KeypairGenerateManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "7628fca1d0b604b6d42f18ec7ae8f6ad426d68e85ee3f7128b9e6fb53715b1db"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 6, "content": "class KeypairReplaceManaged\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "9ecf1a6f41bd0d0bdb893b95d5cd9b7d172232afd897677ca4f82445af119b48"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 7, "content": "class KeypairFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairFromPkAndSk : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "359c6755299beaff7c7e93cb459b577f115e7b6fb8d6b66554bd4649146fae24"}}, {"doc_uuid": 
"9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 8, "content": "class KeypairExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairPublickey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "095db419aaecf76f02234b499316c8f13c0bdff45f30f8d151785ebfa120318c"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 9, "content": "class KeypairSecretkey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeypairClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "14dec474f9a5671562c187abd8b0d2c95e8b5c6a3e201ac4459fdfc2c2ef05f2"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 10, "content": "class PublickeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "fec16208c07280ade429461a9d8bd288d40b4cef1141b179a8bdd799db548cd7"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 11, "content": "class PublickeyFromSecretkey\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass PublickeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "a9968f085a2b4653f38df5ad92250f582ab3e5d049c1b3959c5da0169c35e26c"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 12, "content": "class SecretkeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass SecretkeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "87a74f274bdd57f9157065558af65322d035f0678993ed6273fcb78bcf9c85ca"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 13, "content": "class SecretkeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const 
Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace AsymmetricCommon\n\nnamespace Kx {\nclass Dh : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "4fee4c7c058774d502aad9ed547c797fbc5e2015aefca823aa62788c8a42b421"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 14, "content": "class Encapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Decapsulate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Kx\n\n", "meta": {"hash_id": "e576a657b46206f45950ee9dea287a415af7b0c5d6709e2763b01884f52fdf60"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 15, "content": "namespace Signatures {\nclass Export : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Import : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "fd8e0dcf9c24fa9ed2dcab62c6c0c5e09d8e72f06306e1bceeeec154b3e8b879"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 16, "content": "class StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateUpdate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "c3f5a79fbf77f7728920e7fa096da21ad05837e8a3e59695fe990937dcbbae95"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 17, "content": "class StateSign : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "c552bc1099290eddb55308c82b578f615ac79df07a0ee2497c42b3f3eb466aa9"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 18, "content": "class VerificationStateOpen\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateUpdate\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": 
{"hash_id": "427c878f2ef7c38bfc0df2cd759507f15affed89e3968c798faa28c71798987d"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 19, "content": "class VerificationStateVerify\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass VerificationStateClose\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass Close : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n} // namespace Signatures\n\n", "meta": {"hash_id": "28ffbc6ff4c957cd618dcc929907e88c7d804e3cd83aa9eef17fcc554cb63bb2"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 20, "content": "namespace Symmetric {\nclass KeyGenerate : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyImport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "b4f5872cc3eeb086f86d2708606c92952a131254d86cb50352d7a18ec59bb5e0"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 21, "content": "class KeyExport : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyGenerateManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "42f808a72f5031a1b95202b4db63085c9065d7dca58399d2706eb18bc2c2a58b"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 22, "content": "class KeyStoreManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyReplaceManaged : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, int32_t,\n int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "93824f89112e13f82d0da060edccacec404e0cc1d431c3c66355180c90cb1a65"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 23, "content": "class KeyId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass KeyFromId : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, 
uint64_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "a39fa6bccad5af8c3dabd83c21eda3c16f8685452aa064b9372791b9e96f4605"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 24, "content": "class StateOpen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, uint32_t, uint32_t,\n uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateClone : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "a4d9c0d1a9c27bf46b98450e996710be81443f608ba750000700641dc88de69f"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 25, "content": "class StateOptionsGet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateOptionsGetU64 : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "32558fe7dbfbe7a0cb64465e728edd08e227dd56c7004f6c34ece55be3528b9c"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 26, "content": "class StateClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateAbsorb : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "2a4a99f50d693ac31b5e0200e57d0b61212a4188db1f07af4a0030e81d9a74f5"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 27, "content": "class StateSqueeze : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateSqueezeTag : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "19e8f038b96f4db488c61f29b9026181d5d12fb14cf7db9e48d94ffc2339b895"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 28, "content": "class StateSqueezeKey : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateMaxTagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "6b7473ae31458e0ac158ab29b1693029231e6c24eb515672de5551e8919b371e"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 29, "content": "class StateEncrypt : public Runtime::HostFunction {\npublic:\n Expect body(const 
Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateEncryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "85f02951760940c748103194b8aa1783c18be482ad3969413a0a240cde3bc7c4"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 30, "content": "class StateDecrypt : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass StateDecryptDetached\n : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t, uint32_t, uint32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "36ef288444d3e9a31b57494f550b889a2954b4da547271c3d2f4398e6b9cf261"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 31, "content": "class StateRatchet : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "2e0513a1c1a4632cad0fa2a4a923f85402c9ae6621bdbbd8b4f9c6e1fa6e392a"}}, {"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 32, "content": "class TagPull : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagVerify : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass TagClose : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n} // namespace Symmetric\n\n} // namespace WasiCryptoMock\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "df1e52b31454061f0043b08e90276cb5d7605479b14d88e0c3659a91dffcd445"}}]}], "golden_chunks": [{"doc_uuid": "9c45438b59c7592ded6528bf3c12861598a182f05c8e4ee65217800a1e030582", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#pragma once\n\n#include \"common/errcode.h\"\n#include \"host/mock/log.h\"\n#include \"runtime/callingframe.h\"\n#include \"runtime/hostfunc.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCryptoMock {\n\nusing namespace std::literals;\nstatic inline constexpr const uint32_t kWASICryptoError = 1U;\n\nnamespace Common {\nclass ArrayOutputLen : public Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\nclass ArrayOutputPull : public 
Runtime::HostFunction {\npublic:\n Expect body(const Runtime::CallingFrame &, int32_t, uint32_t,\n uint32_t, uint32_t) {\n printPluginMock(\"WASI-Crypto\"sv);\n return kWASICryptoError;\n }\n};\n\n", "meta": {"hash_id": "eddf3231983ad72c3f49aaff2b388eaf21f103c2b9e9fbd79c675da503bb0d92"}}], "meta": {"doctype": "codebase", "relative_path": "/include/host/mock/wasi_crypto_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 33, "source": "codebases/jsonl"}} +{"query": "How do you access the magic bytes of a Module?", "answer": "The magic bytes can be accessed using the getMagic() methods:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\nThese return const and non-const references to the Magic member variable which is a vector of bytes.", "golden_doc_uuids": ["639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304"], "golden_chunk_uuids": [["639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", 0]], "golden_documents": [{"uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/ast/module.h - Module class definition -------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the declaration of the Module node class, which is the\n/// module node in AST.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"ast/section.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace AST {\n\n/// AST Module node.\nclass Module {\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n /// Getters of references to sections.\n Span getCustomSections() const noexcept {\n return CustomSecs;\n }\n std::vector &getCustomSections() noexcept {\n return CustomSecs;\n }\n const TypeSection &getTypeSection() const { return TypeSec; }\n TypeSection &getTypeSection() { return TypeSec; }\n const ImportSection &getImportSection() const { return ImportSec; }\n ImportSection &getImportSection() { return ImportSec; }\n const FunctionSection &getFunctionSection() const { return FunctionSec; }\n FunctionSection &getFunctionSection() { return FunctionSec; }\n const TableSection &getTableSection() const { return TableSec; }\n TableSection &getTableSection() { return TableSec; }\n const MemorySection &getMemorySection() const { return MemorySec; }\n MemorySection &getMemorySection() { return MemorySec; }\n const GlobalSection &getGlobalSection() const { return GlobalSec; }\n GlobalSection &getGlobalSection() { return GlobalSec; }\n const ExportSection &getExportSection() const { return ExportSec; }\n ExportSection &getExportSection() { return ExportSec; }\n const StartSection &getStartSection() const { return StartSec; }\n StartSection &getStartSection() { return StartSec; }\n const ElementSection &getElementSection() const { return ElementSec; }\n ElementSection &getElementSection() { return ElementSec; }\n const CodeSection &getCodeSection() const { return CodeSec; }\n CodeSection &getCodeSection() { return CodeSec; 
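Every mock host function in the chunks above repeats one pattern, but the angle-bracket template arguments were stripped during text extraction. A minimal sketch of the pattern with those arguments restored follows; the CRTP parameter on Runtime::HostFunction and the Expect<uint32_t> return type are inferred from the WasmEdge sources, and ExampleMock is a hypothetical name.

```cpp
// Sketch of the recurring WASI-Crypto mock pattern, with the template
// arguments that were stripped during extraction restored. "ExampleMock"
// is a hypothetical class name used only for illustration.
#include "common/errcode.h"
#include "host/mock/log.h"
#include "runtime/callingframe.h"
#include "runtime/hostfunc.h"

namespace WasmEdge {
namespace Host {
namespace WasiCryptoMock {

using namespace std::literals;
static inline constexpr const uint32_t kWASICryptoError = 1U;

// Each mock derives from Runtime::HostFunction<Derived> (CRTP), logs that
// the plugin is only mocked, and returns the shared error code.
class ExampleMock : public Runtime::HostFunction<ExampleMock> {
public:
  Expect<uint32_t> body(const Runtime::CallingFrame &, int32_t, uint32_t) {
    printPluginMock("WASI-Crypto"sv);
    return kWASICryptoError;
  }
};

} // namespace WasiCryptoMock
} // namespace Host
} // namespace WasmEdge
```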
}\n const DataSection &getDataSection() const { return DataSec; }\n DataSection &getDataSection() { return DataSec; }\n const DataCountSection &getDataCountSection() const { return DataCountSec; }\n DataCountSection &getDataCountSection() { return DataCountSec; }\n const AOTSection &getAOTSection() const { return AOTSec; }\n AOTSection &getAOTSection() { return AOTSec; }\n\n /// Getter and setter of compiled symbol.\n const auto &getSymbol() const noexcept { return IntrSymbol; }\n void setSymbol(Symbol S) noexcept {\n IntrSymbol = std::move(S);\n }\n\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; }\n void setIsValidated(bool V = true) noexcept { IsValidated = V; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n /// @}\n\n /// \\name Section nodes of Module node.\n /// @{\n std::vector CustomSecs;\n TypeSection TypeSec;\n ImportSection ImportSec;\n FunctionSection FunctionSec;\n TableSection TableSec;\n MemorySection MemorySec;\n GlobalSection GlobalSec;\n ExportSection ExportSec;\n StartSection StartSec;\n ElementSection ElementSec;\n CodeSection CodeSec;\n DataSection DataSec;\n DataCountSection DataCountSec;\n /// @}\n\n /// \\name Data of AOT.\n /// @{\n AOTSection AOTSec;\n Symbol IntrSymbol;\n /// @}\n\n /// \\name Validated flag.\n /// @{\n bool IsValidated = false;\n /// @}\n};\n\nclass CoreModuleSection : public Section {\npublic:\n /// Getter of content.\n const Module &getContent() const noexcept { return Content; }\n Module &getContent() noexcept { return Content; }\n\nprivate:\n Module Content;\n};\n\nnamespace Component {\n\nclass Component {\n using Section =\n std::variant;\n\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n /// Getter of layer vector.\n const std::vector &getLayer() const noexcept { return Layer; }\n std::vector &getLayer() noexcept { return Layer; }\n\n std::vector
<Section>
      &getSections() noexcept { return Secs; }\n Span getSections() const noexcept { return Secs; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n std::vector Layer;\n\n std::vector
      Secs;\n /// @}\n};\n\n} // namespace Component\n\n} // namespace AST\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/include/ast/module.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/ast/module.h - Module class definition -------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the declaration of the Module node class, which is the\n/// module node in AST.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"ast/section.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace AST {\n\n/// AST Module node.\nclass Module {\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n", "meta": {"hash_id": "8c42819a85bddccd1b665ca7a22c5a8be80880bc76acad3692368cb47ebc483b"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 1, "content": " /// Getters of references to sections.\n Span getCustomSections() const noexcept {\n return CustomSecs;\n }\n std::vector &getCustomSections() noexcept {\n return CustomSecs;\n }\n const TypeSection &getTypeSection() const { return TypeSec; }\n TypeSection &getTypeSection() { return TypeSec; }\n const ImportSection &getImportSection() const { return ImportSec; }\n ImportSection &getImportSection() { return ImportSec; }\n const FunctionSection &getFunctionSection() const { return FunctionSec; }\n", "meta": {"hash_id": "01bf20ac01f80b12b9b19510a0e57e37f8fdc9a81fd2fa21c147c5a389393faa"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 2, "content": " FunctionSection &getFunctionSection() { return FunctionSec; }\n const TableSection &getTableSection() const { return TableSec; }\n TableSection &getTableSection() { return TableSec; }\n const MemorySection &getMemorySection() const { return MemorySec; }\n MemorySection &getMemorySection() { return MemorySec; }\n const GlobalSection &getGlobalSection() const { return GlobalSec; }\n GlobalSection &getGlobalSection() { return GlobalSec; }\n const ExportSection &getExportSection() const { return ExportSec; }\n ExportSection &getExportSection() { return ExportSec; }\n const StartSection &getStartSection() const { return StartSec; }\n StartSection &getStartSection() { return StartSec; }\n const ElementSection &getElementSection() const { return ElementSec; }\n ElementSection &getElementSection() { return ElementSec; }\n const CodeSection &getCodeSection() const { return CodeSec; }\n", "meta": {"hash_id": "faa983c8c7961942915c3150c2ed1c7b6386a1138b2c95f3bacb4580046717fe"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 3, "content": " CodeSection &getCodeSection() { return CodeSec; }\n const DataSection &getDataSection() const { return DataSec; }\n DataSection &getDataSection() { return DataSec; }\n const DataCountSection &getDataCountSection() const { 
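As a usage illustration for the section getters listed in the module.h header above, a short sketch follows. The getContent() accessor on each section and the function name are assumptions for illustration, not part of this dataset.

```cpp
// Illustrative only: counting entries via two of the Module section getters.
// The getContent() accessor on sections and the function name are assumed.
#include "ast/module.h"
#include <cstddef>

std::size_t countImportsAndFunctions(const WasmEdge::AST::Module &Mod) {
  // getImportSection()/getFunctionSection() return references to the
  // corresponding section nodes held by the Module.
  const auto &Imports = Mod.getImportSection();
  const auto &Funcs = Mod.getFunctionSection();
  return Imports.getContent().size() + Funcs.getContent().size();
}
```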
return DataCountSec; }\n DataCountSection &getDataCountSection() { return DataCountSec; }\n const AOTSection &getAOTSection() const { return AOTSec; }\n AOTSection &getAOTSection() { return AOTSec; }\n\n /// Getter and setter of compiled symbol.\n const auto &getSymbol() const noexcept { return IntrSymbol; }\n void setSymbol(Symbol S) noexcept {\n IntrSymbol = std::move(S);\n }\n\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; }\n void setIsValidated(bool V = true) noexcept { IsValidated = V; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n /// @}\n\n", "meta": {"hash_id": "cda2a45fffdfbcaa6094ae3230a1088f1b5ea099cb34cc57ccc54b122ea609a7"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 4, "content": " /// \\name Section nodes of Module node.\n /// @{\n std::vector CustomSecs;\n TypeSection TypeSec;\n ImportSection ImportSec;\n FunctionSection FunctionSec;\n TableSection TableSec;\n MemorySection MemorySec;\n GlobalSection GlobalSec;\n ExportSection ExportSec;\n StartSection StartSec;\n ElementSection ElementSec;\n CodeSection CodeSec;\n DataSection DataSec;\n DataCountSection DataCountSec;\n /// @}\n\n /// \\name Data of AOT.\n /// @{\n AOTSection AOTSec;\n Symbol IntrSymbol;\n /// @}\n\n /// \\name Validated flag.\n /// @{\n bool IsValidated = false;\n /// @}\n};\n\nclass CoreModuleSection : public Section {\npublic:\n /// Getter of content.\n const Module &getContent() const noexcept { return Content; }\n Module &getContent() noexcept { return Content; }\n\nprivate:\n Module Content;\n};\n\nnamespace Component {\n\n", "meta": {"hash_id": "73938b53f77f46974dc1b2492c1092cac25c4d0949593ca5fc7dd061c56587c8"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 5, "content": "class Component {\n using Section =\n std::variant;\n\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n", "meta": {"hash_id": "a4d9f48bfc8d45373dcb38ccc92c622e9245a67db1be5bcdc5837dbdd97c1e3b"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 6, "content": " /// Getter of layer vector.\n const std::vector &getLayer() const noexcept { return Layer; }\n std::vector &getLayer() noexcept { return Layer; }\n\n std::vector
      &getSections() noexcept { return Secs; }\n Span getSections() const noexcept { return Secs; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n std::vector Layer;\n\n std::vector
      Secs;\n /// @}\n};\n\n} // namespace Component\n\n} // namespace AST\n} // namespace WasmEdge\n", "meta": {"hash_id": "bae55e05dd5a1a51b0b38f1307b9386c12169304cb2c530e50bddfb5d745c4c0"}}]}], "golden_chunks": [{"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/ast/module.h - Module class definition -------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the declaration of the Module node class, which is the\n/// module node in AST.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"ast/section.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace AST {\n\n/// AST Module node.\nclass Module {\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n", "meta": {"hash_id": "8c42819a85bddccd1b665ca7a22c5a8be80880bc76acad3692368cb47ebc483b"}}], "meta": {"doctype": "codebase", "relative_path": "/include/ast/module.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "What sections are contained in a Module?", "answer": "A Module contains the following sections, as seen from the member variables:\n std::vector CustomSecs;\n TypeSection TypeSec;\n ImportSection ImportSec; \n FunctionSection FunctionSec;\n TableSection TableSec;\n MemorySection MemorySec;\n GlobalSection GlobalSec;\n ExportSection ExportSec;\n StartSection StartSec;\n ElementSection ElementSec;\n CodeSection CodeSec; \n DataSection DataSec;\n DataCountSection DataCountSec;\nIt also contains an AOTSection for ahead-of-time compilation:\n AOTSection AOTSec;", "golden_doc_uuids": ["639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304"], "golden_chunk_uuids": [["639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", 1]], "golden_documents": [{"uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/ast/module.h - Module class definition -------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the declaration of the Module node class, which is the\n/// module node in AST.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"ast/section.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace AST {\n\n/// AST Module node.\nclass Module {\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n /// Getters of references to sections.\n Span getCustomSections() const noexcept {\n return CustomSecs;\n }\n std::vector &getCustomSections() noexcept {\n return 
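A hedged usage sketch for the getMagic() and getVersion() accessors declared in the Module class above. The expected byte values come from the WebAssembly binary format (magic "\0asm" and version 1); WasmEdge::Byte aliasing uint8_t is an assumption.

```cpp
// Hypothetical preamble check built on getMagic()/getVersion(). The expected
// bytes are the WebAssembly binary-format magic "\0asm" and version 1;
// WasmEdge::Byte is assumed to alias uint8_t.
#include "ast/module.h"
#include <vector>

bool hasStandardPreamble(const WasmEdge::AST::Module &Mod) {
  const std::vector<WasmEdge::Byte> Magic = {0x00, 0x61, 0x73, 0x6D};
  const std::vector<WasmEdge::Byte> Version = {0x01, 0x00, 0x00, 0x00};
  // Both getters return references to the byte vectors stored in the node.
  return Mod.getMagic() == Magic && Mod.getVersion() == Version;
}
```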
CustomSecs;\n }\n const TypeSection &getTypeSection() const { return TypeSec; }\n TypeSection &getTypeSection() { return TypeSec; }\n const ImportSection &getImportSection() const { return ImportSec; }\n ImportSection &getImportSection() { return ImportSec; }\n const FunctionSection &getFunctionSection() const { return FunctionSec; }\n FunctionSection &getFunctionSection() { return FunctionSec; }\n const TableSection &getTableSection() const { return TableSec; }\n TableSection &getTableSection() { return TableSec; }\n const MemorySection &getMemorySection() const { return MemorySec; }\n MemorySection &getMemorySection() { return MemorySec; }\n const GlobalSection &getGlobalSection() const { return GlobalSec; }\n GlobalSection &getGlobalSection() { return GlobalSec; }\n const ExportSection &getExportSection() const { return ExportSec; }\n ExportSection &getExportSection() { return ExportSec; }\n const StartSection &getStartSection() const { return StartSec; }\n StartSection &getStartSection() { return StartSec; }\n const ElementSection &getElementSection() const { return ElementSec; }\n ElementSection &getElementSection() { return ElementSec; }\n const CodeSection &getCodeSection() const { return CodeSec; }\n CodeSection &getCodeSection() { return CodeSec; }\n const DataSection &getDataSection() const { return DataSec; }\n DataSection &getDataSection() { return DataSec; }\n const DataCountSection &getDataCountSection() const { return DataCountSec; }\n DataCountSection &getDataCountSection() { return DataCountSec; }\n const AOTSection &getAOTSection() const { return AOTSec; }\n AOTSection &getAOTSection() { return AOTSec; }\n\n /// Getter and setter of compiled symbol.\n const auto &getSymbol() const noexcept { return IntrSymbol; }\n void setSymbol(Symbol S) noexcept {\n IntrSymbol = std::move(S);\n }\n\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; }\n void setIsValidated(bool V = true) noexcept { IsValidated = V; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n /// @}\n\n /// \\name Section nodes of Module node.\n /// @{\n std::vector CustomSecs;\n TypeSection TypeSec;\n ImportSection ImportSec;\n FunctionSection FunctionSec;\n TableSection TableSec;\n MemorySection MemorySec;\n GlobalSection GlobalSec;\n ExportSection ExportSec;\n StartSection StartSec;\n ElementSection ElementSec;\n CodeSection CodeSec;\n DataSection DataSec;\n DataCountSection DataCountSec;\n /// @}\n\n /// \\name Data of AOT.\n /// @{\n AOTSection AOTSec;\n Symbol IntrSymbol;\n /// @}\n\n /// \\name Validated flag.\n /// @{\n bool IsValidated = false;\n /// @}\n};\n\nclass CoreModuleSection : public Section {\npublic:\n /// Getter of content.\n const Module &getContent() const noexcept { return Content; }\n Module &getContent() noexcept { return Content; }\n\nprivate:\n Module Content;\n};\n\nnamespace Component {\n\nclass Component {\n using Section =\n std::variant;\n\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n /// Getter of layer vector.\n const std::vector &getLayer() const noexcept { return Layer; }\n std::vector &getLayer() noexcept { return Layer; }\n\n std::vector
      &getSections() noexcept { return Secs; }\n Span getSections() const noexcept { return Secs; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n std::vector Layer;\n\n std::vector
      Secs;\n /// @}\n};\n\n} // namespace Component\n\n} // namespace AST\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/include/ast/module.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/ast/module.h - Module class definition -------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the declaration of the Module node class, which is the\n/// module node in AST.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"ast/section.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace AST {\n\n/// AST Module node.\nclass Module {\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n", "meta": {"hash_id": "8c42819a85bddccd1b665ca7a22c5a8be80880bc76acad3692368cb47ebc483b"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 1, "content": " /// Getters of references to sections.\n Span getCustomSections() const noexcept {\n return CustomSecs;\n }\n std::vector &getCustomSections() noexcept {\n return CustomSecs;\n }\n const TypeSection &getTypeSection() const { return TypeSec; }\n TypeSection &getTypeSection() { return TypeSec; }\n const ImportSection &getImportSection() const { return ImportSec; }\n ImportSection &getImportSection() { return ImportSec; }\n const FunctionSection &getFunctionSection() const { return FunctionSec; }\n", "meta": {"hash_id": "01bf20ac01f80b12b9b19510a0e57e37f8fdc9a81fd2fa21c147c5a389393faa"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 2, "content": " FunctionSection &getFunctionSection() { return FunctionSec; }\n const TableSection &getTableSection() const { return TableSec; }\n TableSection &getTableSection() { return TableSec; }\n const MemorySection &getMemorySection() const { return MemorySec; }\n MemorySection &getMemorySection() { return MemorySec; }\n const GlobalSection &getGlobalSection() const { return GlobalSec; }\n GlobalSection &getGlobalSection() { return GlobalSec; }\n const ExportSection &getExportSection() const { return ExportSec; }\n ExportSection &getExportSection() { return ExportSec; }\n const StartSection &getStartSection() const { return StartSec; }\n StartSection &getStartSection() { return StartSec; }\n const ElementSection &getElementSection() const { return ElementSec; }\n ElementSection &getElementSection() { return ElementSec; }\n const CodeSection &getCodeSection() const { return CodeSec; }\n", "meta": {"hash_id": "faa983c8c7961942915c3150c2ed1c7b6386a1138b2c95f3bacb4580046717fe"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 3, "content": " CodeSection &getCodeSection() { return CodeSec; }\n const DataSection &getDataSection() const { return DataSec; }\n DataSection &getDataSection() { return DataSec; }\n const DataCountSection &getDataCountSection() const { 
return DataCountSec; }\n DataCountSection &getDataCountSection() { return DataCountSec; }\n const AOTSection &getAOTSection() const { return AOTSec; }\n AOTSection &getAOTSection() { return AOTSec; }\n\n /// Getter and setter of compiled symbol.\n const auto &getSymbol() const noexcept { return IntrSymbol; }\n void setSymbol(Symbol S) noexcept {\n IntrSymbol = std::move(S);\n }\n\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; }\n void setIsValidated(bool V = true) noexcept { IsValidated = V; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n /// @}\n\n", "meta": {"hash_id": "cda2a45fffdfbcaa6094ae3230a1088f1b5ea099cb34cc57ccc54b122ea609a7"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 4, "content": " /// \\name Section nodes of Module node.\n /// @{\n std::vector CustomSecs;\n TypeSection TypeSec;\n ImportSection ImportSec;\n FunctionSection FunctionSec;\n TableSection TableSec;\n MemorySection MemorySec;\n GlobalSection GlobalSec;\n ExportSection ExportSec;\n StartSection StartSec;\n ElementSection ElementSec;\n CodeSection CodeSec;\n DataSection DataSec;\n DataCountSection DataCountSec;\n /// @}\n\n /// \\name Data of AOT.\n /// @{\n AOTSection AOTSec;\n Symbol IntrSymbol;\n /// @}\n\n /// \\name Validated flag.\n /// @{\n bool IsValidated = false;\n /// @}\n};\n\nclass CoreModuleSection : public Section {\npublic:\n /// Getter of content.\n const Module &getContent() const noexcept { return Content; }\n Module &getContent() noexcept { return Content; }\n\nprivate:\n Module Content;\n};\n\nnamespace Component {\n\n", "meta": {"hash_id": "73938b53f77f46974dc1b2492c1092cac25c4d0949593ca5fc7dd061c56587c8"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 5, "content": "class Component {\n using Section =\n std::variant;\n\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n", "meta": {"hash_id": "a4d9f48bfc8d45373dcb38ccc92c622e9245a67db1be5bcdc5837dbdd97c1e3b"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 6, "content": " /// Getter of layer vector.\n const std::vector &getLayer() const noexcept { return Layer; }\n std::vector &getLayer() noexcept { return Layer; }\n\n std::vector
      &getSections() noexcept { return Secs; }\n Span getSections() const noexcept { return Secs; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n std::vector Layer;\n\n std::vector
      Secs;\n /// @}\n};\n\n} // namespace Component\n\n} // namespace AST\n} // namespace WasmEdge\n", "meta": {"hash_id": "bae55e05dd5a1a51b0b38f1307b9386c12169304cb2c530e50bddfb5d745c4c0"}}]}], "golden_chunks": [{"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 1, "content": " /// Getters of references to sections.\n Span getCustomSections() const noexcept {\n return CustomSecs;\n }\n std::vector &getCustomSections() noexcept {\n return CustomSecs;\n }\n const TypeSection &getTypeSection() const { return TypeSec; }\n TypeSection &getTypeSection() { return TypeSec; }\n const ImportSection &getImportSection() const { return ImportSec; }\n ImportSection &getImportSection() { return ImportSec; }\n const FunctionSection &getFunctionSection() const { return FunctionSec; }\n", "meta": {"hash_id": "01bf20ac01f80b12b9b19510a0e57e37f8fdc9a81fd2fa21c147c5a389393faa"}}], "meta": {"doctype": "codebase", "relative_path": "/include/ast/module.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "How do you check if a Module has been validated?", "answer": "The Module has an IsValidated flag that indicates if it has been validated or not. This can be checked and set using these methods:\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; } \n void setIsValidated(bool V = true) noexcept { IsValidated = V; }", "golden_doc_uuids": ["639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304"], "golden_chunk_uuids": [["639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", 3]], "golden_documents": [{"uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/ast/module.h - Module class definition -------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the declaration of the Module node class, which is the\n/// module node in AST.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"ast/section.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace AST {\n\n/// AST Module node.\nclass Module {\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n /// Getters of references to sections.\n Span getCustomSections() const noexcept {\n return CustomSecs;\n }\n std::vector &getCustomSections() noexcept {\n return CustomSecs;\n }\n const TypeSection &getTypeSection() const { return TypeSec; }\n TypeSection &getTypeSection() { return TypeSec; }\n const ImportSection &getImportSection() const { return ImportSec; }\n ImportSection &getImportSection() { return ImportSec; }\n const FunctionSection &getFunctionSection() const { return FunctionSec; }\n FunctionSection &getFunctionSection() { return FunctionSec; }\n const TableSection &getTableSection() const { return TableSec; }\n TableSection &getTableSection() { return TableSec; }\n const MemorySection &getMemorySection() const { return MemorySec; }\n MemorySection &getMemorySection() { return MemorySec; }\n const GlobalSection 
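A minimal sketch of the validated-flag accessors quoted in the answer above; the wrapper function and the elided validation step are hypothetical.

```cpp
// Hypothetical wrapper around the validated-flag accessors; the validation
// pass itself is elided because it lives outside this header.
#include "ast/module.h"

void ensureValidated(WasmEdge::AST::Module &Mod) {
  if (!Mod.getIsValidated()) {
    // ... run the validator over Mod here ...
    Mod.setIsValidated(); // the default argument sets the flag to true
  }
}
```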
&getGlobalSection() const { return GlobalSec; }\n GlobalSection &getGlobalSection() { return GlobalSec; }\n const ExportSection &getExportSection() const { return ExportSec; }\n ExportSection &getExportSection() { return ExportSec; }\n const StartSection &getStartSection() const { return StartSec; }\n StartSection &getStartSection() { return StartSec; }\n const ElementSection &getElementSection() const { return ElementSec; }\n ElementSection &getElementSection() { return ElementSec; }\n const CodeSection &getCodeSection() const { return CodeSec; }\n CodeSection &getCodeSection() { return CodeSec; }\n const DataSection &getDataSection() const { return DataSec; }\n DataSection &getDataSection() { return DataSec; }\n const DataCountSection &getDataCountSection() const { return DataCountSec; }\n DataCountSection &getDataCountSection() { return DataCountSec; }\n const AOTSection &getAOTSection() const { return AOTSec; }\n AOTSection &getAOTSection() { return AOTSec; }\n\n /// Getter and setter of compiled symbol.\n const auto &getSymbol() const noexcept { return IntrSymbol; }\n void setSymbol(Symbol S) noexcept {\n IntrSymbol = std::move(S);\n }\n\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; }\n void setIsValidated(bool V = true) noexcept { IsValidated = V; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n /// @}\n\n /// \\name Section nodes of Module node.\n /// @{\n std::vector CustomSecs;\n TypeSection TypeSec;\n ImportSection ImportSec;\n FunctionSection FunctionSec;\n TableSection TableSec;\n MemorySection MemorySec;\n GlobalSection GlobalSec;\n ExportSection ExportSec;\n StartSection StartSec;\n ElementSection ElementSec;\n CodeSection CodeSec;\n DataSection DataSec;\n DataCountSection DataCountSec;\n /// @}\n\n /// \\name Data of AOT.\n /// @{\n AOTSection AOTSec;\n Symbol IntrSymbol;\n /// @}\n\n /// \\name Validated flag.\n /// @{\n bool IsValidated = false;\n /// @}\n};\n\nclass CoreModuleSection : public Section {\npublic:\n /// Getter of content.\n const Module &getContent() const noexcept { return Content; }\n Module &getContent() noexcept { return Content; }\n\nprivate:\n Module Content;\n};\n\nnamespace Component {\n\nclass Component {\n using Section =\n std::variant;\n\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n /// Getter of layer vector.\n const std::vector &getLayer() const noexcept { return Layer; }\n std::vector &getLayer() noexcept { return Layer; }\n\n std::vector
      &getSections() noexcept { return Secs; }\n Span getSections() const noexcept { return Secs; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n std::vector Layer;\n\n std::vector
      Secs;\n /// @}\n};\n\n} // namespace Component\n\n} // namespace AST\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/include/ast/module.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/ast/module.h - Module class definition -------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n///\n/// \\file\n/// This file contains the declaration of the Module node class, which is the\n/// module node in AST.\n///\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"ast/section.h\"\n\n#include \n\nnamespace WasmEdge {\nnamespace AST {\n\n/// AST Module node.\nclass Module {\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n", "meta": {"hash_id": "8c42819a85bddccd1b665ca7a22c5a8be80880bc76acad3692368cb47ebc483b"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 1, "content": " /// Getters of references to sections.\n Span getCustomSections() const noexcept {\n return CustomSecs;\n }\n std::vector &getCustomSections() noexcept {\n return CustomSecs;\n }\n const TypeSection &getTypeSection() const { return TypeSec; }\n TypeSection &getTypeSection() { return TypeSec; }\n const ImportSection &getImportSection() const { return ImportSec; }\n ImportSection &getImportSection() { return ImportSec; }\n const FunctionSection &getFunctionSection() const { return FunctionSec; }\n", "meta": {"hash_id": "01bf20ac01f80b12b9b19510a0e57e37f8fdc9a81fd2fa21c147c5a389393faa"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 2, "content": " FunctionSection &getFunctionSection() { return FunctionSec; }\n const TableSection &getTableSection() const { return TableSec; }\n TableSection &getTableSection() { return TableSec; }\n const MemorySection &getMemorySection() const { return MemorySec; }\n MemorySection &getMemorySection() { return MemorySec; }\n const GlobalSection &getGlobalSection() const { return GlobalSec; }\n GlobalSection &getGlobalSection() { return GlobalSec; }\n const ExportSection &getExportSection() const { return ExportSec; }\n ExportSection &getExportSection() { return ExportSec; }\n const StartSection &getStartSection() const { return StartSec; }\n StartSection &getStartSection() { return StartSec; }\n const ElementSection &getElementSection() const { return ElementSec; }\n ElementSection &getElementSection() { return ElementSec; }\n const CodeSection &getCodeSection() const { return CodeSec; }\n", "meta": {"hash_id": "faa983c8c7961942915c3150c2ed1c7b6386a1138b2c95f3bacb4580046717fe"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 3, "content": " CodeSection &getCodeSection() { return CodeSec; }\n const DataSection &getDataSection() const { return DataSec; }\n DataSection &getDataSection() { return DataSec; }\n const DataCountSection &getDataCountSection() const { 
return DataCountSec; }\n DataCountSection &getDataCountSection() { return DataCountSec; }\n const AOTSection &getAOTSection() const { return AOTSec; }\n AOTSection &getAOTSection() { return AOTSec; }\n\n /// Getter and setter of compiled symbol.\n const auto &getSymbol() const noexcept { return IntrSymbol; }\n void setSymbol(Symbol S) noexcept {\n IntrSymbol = std::move(S);\n }\n\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; }\n void setIsValidated(bool V = true) noexcept { IsValidated = V; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n /// @}\n\n", "meta": {"hash_id": "cda2a45fffdfbcaa6094ae3230a1088f1b5ea099cb34cc57ccc54b122ea609a7"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 4, "content": " /// \\name Section nodes of Module node.\n /// @{\n std::vector CustomSecs;\n TypeSection TypeSec;\n ImportSection ImportSec;\n FunctionSection FunctionSec;\n TableSection TableSec;\n MemorySection MemorySec;\n GlobalSection GlobalSec;\n ExportSection ExportSec;\n StartSection StartSec;\n ElementSection ElementSec;\n CodeSection CodeSec;\n DataSection DataSec;\n DataCountSection DataCountSec;\n /// @}\n\n /// \\name Data of AOT.\n /// @{\n AOTSection AOTSec;\n Symbol IntrSymbol;\n /// @}\n\n /// \\name Validated flag.\n /// @{\n bool IsValidated = false;\n /// @}\n};\n\nclass CoreModuleSection : public Section {\npublic:\n /// Getter of content.\n const Module &getContent() const noexcept { return Content; }\n Module &getContent() noexcept { return Content; }\n\nprivate:\n Module Content;\n};\n\nnamespace Component {\n\n", "meta": {"hash_id": "73938b53f77f46974dc1b2492c1092cac25c4d0949593ca5fc7dd061c56587c8"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 5, "content": "class Component {\n using Section =\n std::variant;\n\npublic:\n /// Getter of magic vector.\n const std::vector &getMagic() const noexcept { return Magic; }\n std::vector &getMagic() noexcept { return Magic; }\n\n /// Getter of version vector.\n const std::vector &getVersion() const noexcept { return Version; }\n std::vector &getVersion() noexcept { return Version; }\n\n", "meta": {"hash_id": "a4d9f48bfc8d45373dcb38ccc92c622e9245a67db1be5bcdc5837dbdd97c1e3b"}}, {"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 6, "content": " /// Getter of layer vector.\n const std::vector &getLayer() const noexcept { return Layer; }\n std::vector &getLayer() noexcept { return Layer; }\n\n std::vector
      &getSections() noexcept { return Secs; }\n Span getSections() const noexcept { return Secs; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n std::vector Layer;\n\n std::vector
      Secs;\n /// @}\n};\n\n} // namespace Component\n\n} // namespace AST\n} // namespace WasmEdge\n", "meta": {"hash_id": "bae55e05dd5a1a51b0b38f1307b9386c12169304cb2c530e50bddfb5d745c4c0"}}]}], "golden_chunks": [{"doc_uuid": "639d7de912ea6b55610ce64b56e988bd9483b9e8efa260eb7d62326b46b17304", "index": 3, "content": " CodeSection &getCodeSection() { return CodeSec; }\n const DataSection &getDataSection() const { return DataSec; }\n DataSection &getDataSection() { return DataSec; }\n const DataCountSection &getDataCountSection() const { return DataCountSec; }\n DataCountSection &getDataCountSection() { return DataCountSec; }\n const AOTSection &getAOTSection() const { return AOTSec; }\n AOTSection &getAOTSection() { return AOTSec; }\n\n /// Getter and setter of compiled symbol.\n const auto &getSymbol() const noexcept { return IntrSymbol; }\n void setSymbol(Symbol S) noexcept {\n IntrSymbol = std::move(S);\n }\n\n /// Getter and setter of validated flag.\n bool getIsValidated() const noexcept { return IsValidated; }\n void setIsValidated(bool V = true) noexcept { IsValidated = V; }\n\nprivate:\n /// \\name Data of Module node.\n /// @{\n std::vector Magic;\n std::vector Version;\n /// @}\n\n", "meta": {"hash_id": "cda2a45fffdfbcaa6094ae3230a1088f1b5ea099cb34cc57ccc54b122ea609a7"}}], "meta": {"doctype": "codebase", "relative_path": "/include/ast/module.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "How can I retrieve the current log level using the provided classes?", "answer": "To retrieve the current log level, you can use the AVLogGetLevel class. It has a body method that takes no parameters and returns an int32_t value representing the current log level. For example:\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n", "golden_doc_uuids": ["21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac"], "golden_chunk_uuids": [["21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", 0]], "golden_documents": [{"uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "content": "#pragma once\n#include \"avutil_base.h\"\n\n#include \"runtime/callingframe.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVUtil {\n\nclass AVLogSetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t LogLevelId);\n};\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVLogSetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t FlagsId);\n};\n\nclass AVLogGetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n// Option funcs.\nclass AVOptSetBin : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetBin(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSet : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSet(std::shared_ptr HostEnv)\n : 
WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetDouble : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetDouble(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetImageSize : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetImageSize(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetPixelFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetPixelFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetSampleFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetSampleFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVRescaleQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, int32_t CDen);\n};\n\nclass AVRescaleQRnd : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQRnd(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &, int64_t A, int32_t BNum,\n int32_t BDen, int32_t CNum, int32_t CDen,\n int32_t RoundingId);\n};\n\nclass AVUtilVersion : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilVersion(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &);\n};\n\nclass AVGetChannelLayoutNbChannels\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNbChannels(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutNameLen\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNameLen(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutName\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutName(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId, uint32_t NamePtr,\n uint32_t NameLen);\n};\n\nclass AVGetChannelLayoutMask\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutMask(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetDefaultChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetDefaultChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n int32_t ChannelLayoutId);\n};\n\nclass AVUtilConfigurationLength\n 
: public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfigurationLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilConfiguration : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfiguration(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t ConfigPtr,\n uint32_t ConfigLen);\n};\n\nclass AVUtilLicenseLength : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicenseLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilLicense : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicense(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t LicensePtr,\n uint32_t LicenseLen);\n};\n\n} // namespace AVUtil\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avutil/avutil_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 9, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 0, "content": "#pragma once\n#include \"avutil_base.h\"\n\n#include \"runtime/callingframe.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVUtil {\n\nclass AVLogSetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t LogLevelId);\n};\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVLogSetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t FlagsId);\n};\n\n", "meta": {"hash_id": "8766a5c4aecc8dc1f0c30053ec5d8ed5311fae2d28883792e6c570f7b9d0ba99"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 1, "content": "class AVLogGetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n// Option funcs.\nclass AVOptSetBin : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetBin(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSet : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSet(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "2b41be752bc1457ee8b3da10031e760e0b2360053b98e88c81eb10338bb53805"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 2, "content": "class AVOptSetDouble : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetDouble(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetQ : public WasmEdgeFFmpegAVUtil {\npublic:\n 
AVOptSetQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetImageSize : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetImageSize(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "1febc021e50b935ab9b8e87cd9c01eda147025c41161a5ad0433196c9fd2880e"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 3, "content": "class AVOptSetPixelFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetPixelFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetSampleFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetSampleFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "0f373ddf29a8499473113dc19fb60c535109ff5a64ac6ececc09e62981b4163a"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 4, "content": "class AVOptSetChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVRescaleQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, int32_t CDen);\n};\n\nclass AVRescaleQRnd : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQRnd(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &, int64_t A, int32_t BNum,\n int32_t BDen, int32_t CNum, int32_t CDen,\n int32_t RoundingId);\n};\n\n", "meta": {"hash_id": "827c371f07a6a561e232c8fa9bcdfd398ba6cb42de5f00619d13b652cb4740e1"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 5, "content": "class AVUtilVersion : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilVersion(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &);\n};\n\nclass AVGetChannelLayoutNbChannels\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNbChannels(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutNameLen\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNameLen(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n", "meta": {"hash_id": "af043d30a2ff1deaad59744b68e79753ea1d85624db1a5c4740c448d20802192"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 6, "content": "class AVGetChannelLayoutName\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutName(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId, uint32_t NamePtr,\n uint32_t NameLen);\n};\n\nclass AVGetChannelLayoutMask\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutMask(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n", "meta": {"hash_id": 
"c11b9f5a947c6ebc92475b2081e72ebc86632f441a441808751134a5b29b3935"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 7, "content": "class AVGetDefaultChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetDefaultChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n int32_t ChannelLayoutId);\n};\n\nclass AVUtilConfigurationLength\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfigurationLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "fcc335a77ba3c156d6b8daebcd16e0a802ae8b84ca70c5a67266c2296e074b76"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 8, "content": "class AVUtilConfiguration : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfiguration(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t ConfigPtr,\n uint32_t ConfigLen);\n};\n\nclass AVUtilLicenseLength : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicenseLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilLicense : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicense(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t LicensePtr,\n uint32_t LicenseLen);\n};\n\n} // namespace AVUtil\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "15ed3b331dfb6a9588b45c24c574dc4f70a8433a8e735fceb16354948d063b7a"}}]}], "golden_chunks": [{"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 0, "content": "#pragma once\n#include \"avutil_base.h\"\n\n#include \"runtime/callingframe.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVUtil {\n\nclass AVLogSetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t LogLevelId);\n};\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVLogSetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t FlagsId);\n};\n\n", "meta": {"hash_id": "8766a5c4aecc8dc1f0c30053ec5d8ed5311fae2d28883792e6c570f7b9d0ba99"}}], "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avutil/avutil_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "How can I set an integer option value using the provided classes?", "answer": "To set an integer option value, you can use the AVOptSetInt class. It has a body method that takes a Runtime::CallingFrame reference as a parameter and returns an int32_t value, likely indicating the success or failure of setting the integer option. 
For example:\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n", "golden_doc_uuids": ["21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac"], "golden_chunk_uuids": [["21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", 1]], "golden_documents": [{"uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "content": "#pragma once\n#include \"avutil_base.h\"\n\n#include \"runtime/callingframe.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVUtil {\n\nclass AVLogSetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t LogLevelId);\n};\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVLogSetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t FlagsId);\n};\n\nclass AVLogGetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n// Option funcs.\nclass AVOptSetBin : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetBin(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSet : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSet(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetDouble : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetDouble(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetImageSize : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetImageSize(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetPixelFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetPixelFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetSampleFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetSampleFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVRescaleQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, 
int32_t CDen);\n};\n\nclass AVRescaleQRnd : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQRnd(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &, int64_t A, int32_t BNum,\n int32_t BDen, int32_t CNum, int32_t CDen,\n int32_t RoundingId);\n};\n\nclass AVUtilVersion : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilVersion(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &);\n};\n\nclass AVGetChannelLayoutNbChannels\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNbChannels(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutNameLen\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNameLen(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutName\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutName(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId, uint32_t NamePtr,\n uint32_t NameLen);\n};\n\nclass AVGetChannelLayoutMask\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutMask(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetDefaultChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetDefaultChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n int32_t ChannelLayoutId);\n};\n\nclass AVUtilConfigurationLength\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfigurationLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilConfiguration : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfiguration(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t ConfigPtr,\n uint32_t ConfigLen);\n};\n\nclass AVUtilLicenseLength : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicenseLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilLicense : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicense(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t LicensePtr,\n uint32_t LicenseLen);\n};\n\n} // namespace AVUtil\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avutil/avutil_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 9, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 0, "content": "#pragma once\n#include \"avutil_base.h\"\n\n#include \"runtime/callingframe.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVUtil {\n\nclass AVLogSetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t LogLevelId);\n};\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil 
{\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVLogSetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t FlagsId);\n};\n\n", "meta": {"hash_id": "8766a5c4aecc8dc1f0c30053ec5d8ed5311fae2d28883792e6c570f7b9d0ba99"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 1, "content": "class AVLogGetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n// Option funcs.\nclass AVOptSetBin : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetBin(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSet : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSet(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "2b41be752bc1457ee8b3da10031e760e0b2360053b98e88c81eb10338bb53805"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 2, "content": "class AVOptSetDouble : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetDouble(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetImageSize : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetImageSize(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "1febc021e50b935ab9b8e87cd9c01eda147025c41161a5ad0433196c9fd2880e"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 3, "content": "class AVOptSetPixelFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetPixelFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetSampleFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetSampleFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "0f373ddf29a8499473113dc19fb60c535109ff5a64ac6ececc09e62981b4163a"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 4, "content": "class AVOptSetChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVRescaleQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, int32_t CDen);\n};\n\nclass AVRescaleQRnd : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQRnd(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) 
{}\n Expect body(const Runtime::CallingFrame &, int64_t A, int32_t BNum,\n int32_t BDen, int32_t CNum, int32_t CDen,\n int32_t RoundingId);\n};\n\n", "meta": {"hash_id": "827c371f07a6a561e232c8fa9bcdfd398ba6cb42de5f00619d13b652cb4740e1"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 5, "content": "class AVUtilVersion : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilVersion(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &);\n};\n\nclass AVGetChannelLayoutNbChannels\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNbChannels(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutNameLen\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNameLen(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n", "meta": {"hash_id": "af043d30a2ff1deaad59744b68e79753ea1d85624db1a5c4740c448d20802192"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 6, "content": "class AVGetChannelLayoutName\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutName(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId, uint32_t NamePtr,\n uint32_t NameLen);\n};\n\nclass AVGetChannelLayoutMask\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutMask(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n", "meta": {"hash_id": "c11b9f5a947c6ebc92475b2081e72ebc86632f441a441808751134a5b29b3935"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 7, "content": "class AVGetDefaultChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetDefaultChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n int32_t ChannelLayoutId);\n};\n\nclass AVUtilConfigurationLength\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfigurationLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "fcc335a77ba3c156d6b8daebcd16e0a802ae8b84ca70c5a67266c2296e074b76"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 8, "content": "class AVUtilConfiguration : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfiguration(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t ConfigPtr,\n uint32_t ConfigLen);\n};\n\nclass AVUtilLicenseLength : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicenseLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilLicense : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicense(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t LicensePtr,\n uint32_t LicenseLen);\n};\n\n} // namespace AVUtil\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "15ed3b331dfb6a9588b45c24c574dc4f70a8433a8e735fceb16354948d063b7a"}}]}], "golden_chunks": 
[{"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 1, "content": "class AVLogGetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n// Option funcs.\nclass AVOptSetBin : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetBin(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSet : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSet(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "2b41be752bc1457ee8b3da10031e760e0b2360053b98e88c81eb10338bb53805"}}], "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avutil/avutil_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the AVRescaleQ class?", "answer": "The AVRescaleQ class is used to rescale a 64-bit integer value using rational numbers. It has a body method that takes several parameters:\n\nExpect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, int32_t CDen);\n\n- A: The 64-bit integer value to be rescaled.\n- BNum and BDen: The numerator and denominator of the first rational number.\n- CNum and CDen: The numerator and denominator of the second rational number.\nThe method returns the rescaled 64-bit integer value.", "golden_doc_uuids": ["21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac"], "golden_chunk_uuids": [["21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", 4]], "golden_documents": [{"uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "content": "#pragma once\n#include \"avutil_base.h\"\n\n#include \"runtime/callingframe.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVUtil {\n\nclass AVLogSetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t LogLevelId);\n};\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVLogSetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t FlagsId);\n};\n\nclass AVLogGetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n// Option funcs.\nclass AVOptSetBin : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetBin(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSet : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSet(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : 
WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetDouble : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetDouble(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetImageSize : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetImageSize(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetPixelFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetPixelFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetSampleFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetSampleFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVRescaleQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, int32_t CDen);\n};\n\nclass AVRescaleQRnd : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQRnd(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &, int64_t A, int32_t BNum,\n int32_t BDen, int32_t CNum, int32_t CDen,\n int32_t RoundingId);\n};\n\nclass AVUtilVersion : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilVersion(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &);\n};\n\nclass AVGetChannelLayoutNbChannels\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNbChannels(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutNameLen\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNameLen(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutName\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutName(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId, uint32_t NamePtr,\n uint32_t NameLen);\n};\n\nclass AVGetChannelLayoutMask\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutMask(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetDefaultChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetDefaultChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n int32_t ChannelLayoutId);\n};\n\nclass AVUtilConfigurationLength\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfigurationLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame 
&Frame);\n};\n\nclass AVUtilConfiguration : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfiguration(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t ConfigPtr,\n uint32_t ConfigLen);\n};\n\nclass AVUtilLicenseLength : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicenseLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilLicense : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicense(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t LicensePtr,\n uint32_t LicenseLen);\n};\n\n} // namespace AVUtil\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avutil/avutil_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 9, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 0, "content": "#pragma once\n#include \"avutil_base.h\"\n\n#include \"runtime/callingframe.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVUtil {\n\nclass AVLogSetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t LogLevelId);\n};\n\nclass AVLogGetLevel : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetLevel(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVLogSetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogSetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int32_t FlagsId);\n};\n\n", "meta": {"hash_id": "8766a5c4aecc8dc1f0c30053ec5d8ed5311fae2d28883792e6c570f7b9d0ba99"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 1, "content": "class AVLogGetFlags : public WasmEdgeFFmpegAVUtil {\npublic:\n AVLogGetFlags(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n// Option funcs.\nclass AVOptSetBin : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetBin(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSet : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSet(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetInt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetInt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "2b41be752bc1457ee8b3da10031e760e0b2360053b98e88c81eb10338bb53805"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 2, "content": "class AVOptSetDouble : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetDouble(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetImageSize : public WasmEdgeFFmpegAVUtil {\npublic:\n 
AVOptSetImageSize(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "1febc021e50b935ab9b8e87cd9c01eda147025c41161a5ad0433196c9fd2880e"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 3, "content": "class AVOptSetPixelFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetPixelFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVOptSetSampleFmt : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetSampleFmt(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "0f373ddf29a8499473113dc19fb60c535109ff5a64ac6ececc09e62981b4163a"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 4, "content": "class AVOptSetChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVRescaleQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, int32_t CDen);\n};\n\nclass AVRescaleQRnd : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQRnd(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &, int64_t A, int32_t BNum,\n int32_t BDen, int32_t CNum, int32_t CDen,\n int32_t RoundingId);\n};\n\n", "meta": {"hash_id": "827c371f07a6a561e232c8fa9bcdfd398ba6cb42de5f00619d13b652cb4740e1"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 5, "content": "class AVUtilVersion : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilVersion(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &);\n};\n\nclass AVGetChannelLayoutNbChannels\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNbChannels(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\nclass AVGetChannelLayoutNameLen\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutNameLen(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n", "meta": {"hash_id": "af043d30a2ff1deaad59744b68e79753ea1d85624db1a5c4740c448d20802192"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 6, "content": "class AVGetChannelLayoutName\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutName(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId, uint32_t NamePtr,\n uint32_t NameLen);\n};\n\nclass AVGetChannelLayoutMask\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetChannelLayoutMask(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n uint64_t ChannelLayoutId);\n};\n\n", "meta": {"hash_id": "c11b9f5a947c6ebc92475b2081e72ebc86632f441a441808751134a5b29b3935"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 7, "content": "class 
AVGetDefaultChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVGetDefaultChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame,\n int32_t ChannelLayoutId);\n};\n\nclass AVUtilConfigurationLength\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfigurationLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\n", "meta": {"hash_id": "fcc335a77ba3c156d6b8daebcd16e0a802ae8b84ca70c5a67266c2296e074b76"}}, {"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 8, "content": "class AVUtilConfiguration : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilConfiguration(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t ConfigPtr,\n uint32_t ConfigLen);\n};\n\nclass AVUtilLicenseLength : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicenseLength(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVUtilLicense : public WasmEdgeFFmpegAVUtil {\npublic:\n AVUtilLicense(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, uint32_t LicensePtr,\n uint32_t LicenseLen);\n};\n\n} // namespace AVUtil\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "15ed3b331dfb6a9588b45c24c574dc4f70a8433a8e735fceb16354948d063b7a"}}]}], "golden_chunks": [{"doc_uuid": "21b3edb89205c4e10d73fa5d7c1e90bea2090310ab0795fcddf5b4673d5478ac", "index": 4, "content": "class AVOptSetChannelLayout\n : public WasmEdgeFFmpegAVUtil {\npublic:\n AVOptSetChannelLayout(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame);\n};\n\nclass AVRescaleQ : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQ(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &Frame, int64_t A,\n int32_t BNum, int32_t BDen, int32_t CNum, int32_t CDen);\n};\n\nclass AVRescaleQRnd : public WasmEdgeFFmpegAVUtil {\npublic:\n AVRescaleQRnd(std::shared_ptr HostEnv)\n : WasmEdgeFFmpegAVUtil(HostEnv) {}\n Expect body(const Runtime::CallingFrame &, int64_t A, int32_t BNum,\n int32_t BDen, int32_t CNum, int32_t CDen,\n int32_t RoundingId);\n};\n\n", "meta": {"hash_id": "827c371f07a6a561e232c8fa9bcdfd398ba6cb42de5f00619d13b652cb4740e1"}}], "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avutil/avutil_func.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the WasmEdge::PO namespace?", "answer": "Based on the code, the WasmEdge::PO namespace likely stands for \"WasmEdge Parser Options\". 
It contains an Error class and an ErrCode enum, suggesting it is used for handling argument parsing errors in WasmEdge.", "golden_doc_uuids": ["58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0"], "golden_chunk_uuids": [["58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", 0]], "golden_documents": [{"uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/include/po/error.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n", "meta": {"hash_id": "a482dc70629cff85027ed485f43c3ae8d5ea14959a0e3ab4b27c841448648323"}}, {"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 1, "content": " Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"hash_id": "ee7a135458e8d1c2dff92e29fbc3490cd1e21ad1114e883064f1d7d9abcf99fe"}}]}], "golden_chunks": [{"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include 
\"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n", "meta": {"hash_id": "a482dc70629cff85027ed485f43c3ae8d5ea14959a0e3ab4b27c841448648323"}}], "meta": {"doctype": "codebase", "relative_path": "/include/po/error.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What error codes are defined in the ErrCode enum?", "answer": "The ErrCode enum defines two error codes:\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nThese likely correspond to different types of argument parsing errors, such as an invalid argument value or an argument value that is out of an expected range.", "golden_doc_uuids": ["58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0"], "golden_chunk_uuids": [["58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", 0]], "golden_documents": [{"uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/include/po/error.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n", "meta": {"hash_id": "a482dc70629cff85027ed485f43c3ae8d5ea14959a0e3ab4b27c841448648323"}}, {"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 1, "content": " Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const 
noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"hash_id": "ee7a135458e8d1c2dff92e29fbc3490cd1e21ad1114e883064f1d7d9abcf99fe"}}]}], "golden_chunks": [{"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n", "meta": {"hash_id": "a482dc70629cff85027ed485f43c3ae8d5ea14959a0e3ab4b27c841448648323"}}], "meta": {"doctype": "codebase", "relative_path": "/include/po/error.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What data does the Error class store?", "answer": "The Error class has two private member variables:\n\nErrCode Code;\nstd::string Message;\n\nCode stores the ErrCode value indicating the type of error, while Message stores a string with a description of the error.", "golden_doc_uuids": ["58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0"], "golden_chunk_uuids": [["58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", 1]], "golden_documents": [{"uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/include/po/error.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge 
Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n", "meta": {"hash_id": "a482dc70629cff85027ed485f43c3ae8d5ea14959a0e3ab4b27c841448648323"}}, {"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 1, "content": " Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"hash_id": "ee7a135458e8d1c2dff92e29fbc3490cd1e21ad1114e883064f1d7d9abcf99fe"}}]}], "golden_chunks": [{"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 1, "content": " Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"hash_id": "ee7a135458e8d1c2dff92e29fbc3490cd1e21ad1114e883064f1d7d9abcf99fe"}}], "meta": {"doctype": "codebase", "relative_path": "/include/po/error.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How can I construct an Error object?", "answer": "The Error class has a constructor that takes an ErrCode and a std::string:\n\nError(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n\nSo you can construct an Error object by passing the appropriate ErrCode and error message string, e.g.:\n\nError err(ErrCode::InvalidArgument, \"Invalid argument value\");\n", "golden_doc_uuids": ["58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0"], "golden_chunk_uuids": [["58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", 1]], "golden_documents": [{"uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string 
Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/include/po/error.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n", "meta": {"hash_id": "a482dc70629cff85027ed485f43c3ae8d5ea14959a0e3ab4b27c841448648323"}}, {"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 1, "content": " Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"hash_id": "ee7a135458e8d1c2dff92e29fbc3490cd1e21ad1114e883064f1d7d9abcf99fe"}}]}], "golden_chunks": [{"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 1, "content": " Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"hash_id": "ee7a135458e8d1c2dff92e29fbc3490cd1e21ad1114e883064f1d7d9abcf99fe"}}], "meta": {"doctype": "codebase", "relative_path": "/include/po/error.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How can I access the error code and message of an Error object?", "answer": "The Error class provides accessor methods for the error code and message:\n\nErrCode code() const noexcept { return Code; }\nstd::string_view message() const &noexcept { return Message; }\nstd::string message() &&noexcept { return std::move(Message); }\n\ncode() returns the ErrCode value, while message() returns either a string_view (if called on a const reference) or the moved string (if called on an rvalue) containing the error message.", "golden_doc_uuids": ["58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0"], "golden_chunk_uuids": [["58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", 1]], "golden_documents": [{"uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma 
once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/include/po/error.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n//===-- wasmedge/po/parser.h - Argument error -----------------------------===//\n//\n// Part of the WasmEdge Project.\n//\n//===----------------------------------------------------------------------===//\n#pragma once\n\n#include \"experimental/expected.hpp\"\n#include \n#include \n#include \n\nnamespace WasmEdge {\nnamespace PO {\n\nenum class ErrCode {\n InvalidArgument,\n OutOfRange,\n};\n\nclass Error {\npublic:\n Error(const Error &) = default;\n Error &operator=(const Error &) = default;\n Error(Error &&) noexcept = default;\n Error &operator=(Error &&) noexcept = default;\n\n", "meta": {"hash_id": "a482dc70629cff85027ed485f43c3ae8d5ea14959a0e3ab4b27c841448648323"}}, {"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 1, "content": " Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"hash_id": "ee7a135458e8d1c2dff92e29fbc3490cd1e21ad1114e883064f1d7d9abcf99fe"}}]}], "golden_chunks": [{"doc_uuid": "58b8335c96ff71630b51a2e5877ca480117c0e39660bb7d672c352a610c23df0", "index": 1, "content": " Error(ErrCode C, std::string M) noexcept : Code(C), Message(std::move(M)) {}\n ErrCode code() const noexcept { return Code; }\n std::string_view message() const &noexcept { return Message; }\n std::string message() &&noexcept { return std::move(Message); }\n\nprivate:\n ErrCode Code;\n std::string Message;\n};\n\n} // namespace PO\n} // namespace WasmEdge\n", "meta": {"hash_id": "ee7a135458e8d1c2dff92e29fbc3490cd1e21ad1114e883064f1d7d9abcf99fe"}}], "meta": {"doctype": "codebase", "relative_path": "/include/po/error.h", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How can I set the ID of a specific chapter using the provided functions?", "answer": "You can set the ID of a specific chapter using the AVChapterSetId function. It takes the AVFormatContext ID, chapter index, and the new chapter ID as input parameters. 
Similar to AVChapterId, it fetches the AVFormatContext pointer, retrieves the chapters array, iterates to the specified chapter index, and sets the ID of the selected chapter to the provided ChapterId value.\n\nThis code snippet shows how it sets the chapter ID:\n\n (*AvChapter)->id = ChapterId;\n", "golden_doc_uuids": ["0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4"], "golden_chunk_uuids": [["0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", 1], ["0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", 0]], "golden_documents": [{"uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "content": "#include \"avChapter.h\"\n\nextern \"C\" {\n#include \"libavformat/avformat.h\"\n}\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVFormat {\n\nExpect AVChapterId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId, uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->id;\n}\n\nExpect AVChapterSetId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t ChapterId) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->id = ChapterId;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterTimebase::body(const Runtime::CallingFrame &Frame,\n uint32_t NumPtr, uint32_t DenPtr,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(Num, MemInst, int32_t, NumPtr, \"\");\n MEM_PTR_CHECK(Den, MemInst, int32_t, DenPtr, \"\");\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n AVRational const AvRational = static_cast(*AvChapter)->time_base;\n *Num = AvRational.num;\n *Den = AvRational.den;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetTimebase::body(const Runtime::CallingFrame &,\n int32_t Num, int32_t Den,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVRational const Timebase = av_make_q(Num, Den);\n\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->time_base = Timebase;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->start;\n}\n\nExpect AVChapterSetStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n int64_t StartValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter 
**AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->start = StartValue;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->end;\n}\n\nExpect AVChapterSetEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t EndValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->end = EndValue;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterMetadata::body(const Runtime::CallingFrame &Frame,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, uint32_t DictPtr) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(DictId, MemInst, uint32_t, DictPtr,\n \"Failed when accessing the return AVDictionary memory\"sv);\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n\n AVDictionary **AvDictionary =\n static_cast(av_malloc(sizeof(AVDictionary *)));\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n *AvDictionary = (*AvChapter)->metadata;\n FFMPEG_PTR_STORE(AvDictionary, DictId);\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetMetadata::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n uint32_t DictId) {\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n FFMPEG_PTR_FETCH(AvDictionary, DictId, AVDictionary *);\n\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n if (AvDictionary == nullptr)\n (*AvChapter)->metadata = nullptr;\n else\n (*AvChapter)->metadata = *AvDictionary;\n return static_cast(ErrNo::Success);\n}\n\n} // namespace AVFormat\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avformat/avChapter.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 0, "content": "#include \"avChapter.h\"\n\nextern \"C\" {\n#include \"libavformat/avformat.h\"\n}\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVFormat {\n\nExpect AVChapterId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId, uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->id;\n}\n\nExpect AVChapterSetId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t ChapterId) {\n\n", "meta": 
{"hash_id": "8e7c835ae9109d36276ac8272252517a06d060617fac7daf4753bf19a7fd6a6f"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 1, "content": " FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->id = ChapterId;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterTimebase::body(const Runtime::CallingFrame &Frame,\n uint32_t NumPtr, uint32_t DenPtr,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(Num, MemInst, int32_t, NumPtr, \"\");\n MEM_PTR_CHECK(Den, MemInst, int32_t, DenPtr, \"\");\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n", "meta": {"hash_id": "336aee6912cc01096560bdb025f3a715d2c9ac1e81027e7db4d5f9aa1f5cd3d9"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 2, "content": " AVRational const AvRational = static_cast(*AvChapter)->time_base;\n *Num = AvRational.num;\n *Den = AvRational.den;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetTimebase::body(const Runtime::CallingFrame &,\n int32_t Num, int32_t Den,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVRational const Timebase = av_make_q(Num, Den);\n\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->time_base = Timebase;\n return static_cast(ErrNo::Success);\n}\n\n", "meta": {"hash_id": "3ec45beff37ecbeb3fd4b36ea534cdc289ad6211ed3c2b86f3fdda0b53941218"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 3, "content": "Expect AVChapterStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->start;\n}\n\n", "meta": {"hash_id": "33387addd6c661642b6554ea66741a5eda732c4d14cdb28dd228c5414708b6f3"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 4, "content": "Expect AVChapterSetStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n int64_t StartValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->start = StartValue;\n return static_cast(ErrNo::Success);\n}\n\n", "meta": {"hash_id": "a2865ca951c56f5a3185d1fb0be92dab699552fb37ee68edea862960956d9490"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 5, "content": "Expect AVChapterEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, 
AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->end;\n}\n\n", "meta": {"hash_id": "765279676927a00a61e19396704adf132ba731f6a21f0458b37878e4bdd698bd"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 6, "content": "Expect AVChapterSetEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t EndValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->end = EndValue;\n return static_cast(ErrNo::Success);\n}\n\n", "meta": {"hash_id": "570d96a04595ade8239dc6c88c54ad6be96fe418bb90c060d22ec5ee5e826513"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 7, "content": "Expect AVChapterMetadata::body(const Runtime::CallingFrame &Frame,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, uint32_t DictPtr) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(DictId, MemInst, uint32_t, DictPtr,\n \"Failed when accessing the return AVDictionary memory\"sv);\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n\n AVDictionary **AvDictionary =\n static_cast(av_malloc(sizeof(AVDictionary *)));\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n", "meta": {"hash_id": "95c5b801159cf0c29acd450b650327965a9479fc4f08feb5a20f1bc149b68dbd"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 8, "content": " // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n *AvDictionary = (*AvChapter)->metadata;\n FFMPEG_PTR_STORE(AvDictionary, DictId);\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetMetadata::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n uint32_t DictId) {\n\n", "meta": {"hash_id": "2816a8cc46322b8036b2a41f884b6ffe072862c5bb8c4fff5286b16e1ddd0cf2"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 9, "content": " FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n FFMPEG_PTR_FETCH(AvDictionary, DictId, AVDictionary *);\n\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n if (AvDictionary == nullptr)\n (*AvChapter)->metadata = nullptr;\n else\n (*AvChapter)->metadata = *AvDictionary;\n return static_cast(ErrNo::Success);\n}\n\n} // namespace AVFormat\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "4cb8b7cab550919a8df7865ebdb4393fadbeb796c3f0e484bff845548b26e252"}}]}], "golden_chunks": [{"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 1, "content": " FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->id = ChapterId;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterTimebase::body(const Runtime::CallingFrame &Frame,\n uint32_t 
NumPtr, uint32_t DenPtr,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(Num, MemInst, int32_t, NumPtr, \"\");\n MEM_PTR_CHECK(Den, MemInst, int32_t, DenPtr, \"\");\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n", "meta": {"hash_id": "336aee6912cc01096560bdb025f3a715d2c9ac1e81027e7db4d5f9aa1f5cd3d9"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 0, "content": "#include \"avChapter.h\"\n\nextern \"C\" {\n#include \"libavformat/avformat.h\"\n}\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVFormat {\n\nExpect AVChapterId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId, uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->id;\n}\n\nExpect AVChapterSetId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t ChapterId) {\n\n", "meta": {"hash_id": "8e7c835ae9109d36276ac8272252517a06d060617fac7daf4753bf19a7fd6a6f"}}], "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avformat/avChapter.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "How can I set the time base of a specific chapter?", "answer": "You can set the time base of a specific chapter using the AVChapterSetTimebase function. It takes the AVFormatContext ID, chapter index, numerator, and denominator of the new time base as input parameters. 
It fetches the AVFormatContext pointer, creates an AVRational structure using the provided numerator and denominator values, retrieves the chapters array, iterates to the specified chapter index, and sets the time_base member of the selected chapter to the new AVRational value.\n\nThis code snippet shows how it sets the time base:\n\n AVRational const Timebase = av_make_q(Num, Den);\n ...\n (*AvChapter)->time_base = Timebase;\n", "golden_doc_uuids": ["0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4"], "golden_chunk_uuids": [["0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", 2]], "golden_documents": [{"uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "content": "#include \"avChapter.h\"\n\nextern \"C\" {\n#include \"libavformat/avformat.h\"\n}\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVFormat {\n\nExpect AVChapterId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId, uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->id;\n}\n\nExpect AVChapterSetId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t ChapterId) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->id = ChapterId;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterTimebase::body(const Runtime::CallingFrame &Frame,\n uint32_t NumPtr, uint32_t DenPtr,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(Num, MemInst, int32_t, NumPtr, \"\");\n MEM_PTR_CHECK(Den, MemInst, int32_t, DenPtr, \"\");\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n AVRational const AvRational = static_cast(*AvChapter)->time_base;\n *Num = AvRational.num;\n *Den = AvRational.den;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetTimebase::body(const Runtime::CallingFrame &,\n int32_t Num, int32_t Den,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVRational const Timebase = av_make_q(Num, Den);\n\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->time_base = Timebase;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->start;\n}\n\nExpect AVChapterSetStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n int64_t StartValue) {\n\n 
FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->start = StartValue;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->end;\n}\n\nExpect AVChapterSetEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t EndValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->end = EndValue;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterMetadata::body(const Runtime::CallingFrame &Frame,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, uint32_t DictPtr) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(DictId, MemInst, uint32_t, DictPtr,\n \"Failed when accessing the return AVDictionary memory\"sv);\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n\n AVDictionary **AvDictionary =\n static_cast(av_malloc(sizeof(AVDictionary *)));\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n *AvDictionary = (*AvChapter)->metadata;\n FFMPEG_PTR_STORE(AvDictionary, DictId);\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetMetadata::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n uint32_t DictId) {\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n FFMPEG_PTR_FETCH(AvDictionary, DictId, AVDictionary *);\n\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n if (AvDictionary == nullptr)\n (*AvChapter)->metadata = nullptr;\n else\n (*AvChapter)->metadata = *AvDictionary;\n return static_cast(ErrNo::Success);\n}\n\n} // namespace AVFormat\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avformat/avChapter.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 0, "content": "#include \"avChapter.h\"\n\nextern \"C\" {\n#include \"libavformat/avformat.h\"\n}\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVFormat {\n\nExpect AVChapterId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId, uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->id;\n}\n\nExpect AVChapterSetId::body(const Runtime::CallingFrame &,\n 
uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t ChapterId) {\n\n", "meta": {"hash_id": "8e7c835ae9109d36276ac8272252517a06d060617fac7daf4753bf19a7fd6a6f"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 1, "content": " FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->id = ChapterId;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterTimebase::body(const Runtime::CallingFrame &Frame,\n uint32_t NumPtr, uint32_t DenPtr,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(Num, MemInst, int32_t, NumPtr, \"\");\n MEM_PTR_CHECK(Den, MemInst, int32_t, DenPtr, \"\");\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n", "meta": {"hash_id": "336aee6912cc01096560bdb025f3a715d2c9ac1e81027e7db4d5f9aa1f5cd3d9"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 2, "content": " AVRational const AvRational = static_cast(*AvChapter)->time_base;\n *Num = AvRational.num;\n *Den = AvRational.den;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetTimebase::body(const Runtime::CallingFrame &,\n int32_t Num, int32_t Den,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVRational const Timebase = av_make_q(Num, Den);\n\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->time_base = Timebase;\n return static_cast(ErrNo::Success);\n}\n\n", "meta": {"hash_id": "3ec45beff37ecbeb3fd4b36ea534cdc289ad6211ed3c2b86f3fdda0b53941218"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 3, "content": "Expect AVChapterStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->start;\n}\n\n", "meta": {"hash_id": "33387addd6c661642b6554ea66741a5eda732c4d14cdb28dd228c5414708b6f3"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 4, "content": "Expect AVChapterSetStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n int64_t StartValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->start = StartValue;\n return static_cast(ErrNo::Success);\n}\n\n", "meta": {"hash_id": "a2865ca951c56f5a3185d1fb0be92dab699552fb37ee68edea862960956d9490"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 5, "content": "Expect AVChapterEnd::body(const Runtime::CallingFrame &,\n uint32_t 
AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->end;\n}\n\n", "meta": {"hash_id": "765279676927a00a61e19396704adf132ba731f6a21f0458b37878e4bdd698bd"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 6, "content": "Expect AVChapterSetEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t EndValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->end = EndValue;\n return static_cast(ErrNo::Success);\n}\n\n", "meta": {"hash_id": "570d96a04595ade8239dc6c88c54ad6be96fe418bb90c060d22ec5ee5e826513"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 7, "content": "Expect AVChapterMetadata::body(const Runtime::CallingFrame &Frame,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, uint32_t DictPtr) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(DictId, MemInst, uint32_t, DictPtr,\n \"Failed when accessing the return AVDictionary memory\"sv);\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n\n AVDictionary **AvDictionary =\n static_cast(av_malloc(sizeof(AVDictionary *)));\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n", "meta": {"hash_id": "95c5b801159cf0c29acd450b650327965a9479fc4f08feb5a20f1bc149b68dbd"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 8, "content": " // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n *AvDictionary = (*AvChapter)->metadata;\n FFMPEG_PTR_STORE(AvDictionary, DictId);\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetMetadata::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n uint32_t DictId) {\n\n", "meta": {"hash_id": "2816a8cc46322b8036b2a41f884b6ffe072862c5bb8c4fff5286b16e1ddd0cf2"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 9, "content": " FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n FFMPEG_PTR_FETCH(AvDictionary, DictId, AVDictionary *);\n\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n if (AvDictionary == nullptr)\n (*AvChapter)->metadata = nullptr;\n else\n (*AvChapter)->metadata = *AvDictionary;\n return static_cast(ErrNo::Success);\n}\n\n} // namespace AVFormat\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "4cb8b7cab550919a8df7865ebdb4393fadbeb796c3f0e484bff845548b26e252"}}]}], "golden_chunks": [{"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 2, "content": " AVRational const AvRational = static_cast(*AvChapter)->time_base;\n *Num = AvRational.num;\n *Den = AvRational.den;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetTimebase::body(const Runtime::CallingFrame &,\n int32_t Num, int32_t Den,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n 
FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVRational const Timebase = av_make_q(Num, Den);\n\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->time_base = Timebase;\n return static_cast(ErrNo::Success);\n}\n\n", "meta": {"hash_id": "3ec45beff37ecbeb3fd4b36ea534cdc289ad6211ed3c2b86f3fdda0b53941218"}}], "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avformat/avChapter.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "What does the AVChapterStart function do?", "answer": "The AVChapterStart function retrieves the start time of a specific chapter in an AVFormatContext. It takes the AVFormatContext ID and chapter index as input parameters. It fetches the AVFormatContext pointer, retrieves the chapters array, iterates to the specified chapter index, and returns the start member of the selected chapter, which represents the start time of the chapter.\n\nThis code snippet shows how it retrieves the start time:\n\n return static_cast(*AvChapter)->start;\n", "golden_doc_uuids": ["0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4"], "golden_chunk_uuids": [["0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", 3]], "golden_documents": [{"uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "content": "#include \"avChapter.h\"\n\nextern \"C\" {\n#include \"libavformat/avformat.h\"\n}\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVFormat {\n\nExpect AVChapterId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId, uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->id;\n}\n\nExpect AVChapterSetId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t ChapterId) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->id = ChapterId;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterTimebase::body(const Runtime::CallingFrame &Frame,\n uint32_t NumPtr, uint32_t DenPtr,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(Num, MemInst, int32_t, NumPtr, \"\");\n MEM_PTR_CHECK(Den, MemInst, int32_t, DenPtr, \"\");\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n AVRational const AvRational = static_cast(*AvChapter)->time_base;\n *Num = AvRational.num;\n *Den = AvRational.den;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetTimebase::body(const Runtime::CallingFrame &,\n int32_t Num, int32_t Den,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVRational const Timebase = av_make_q(Num, Den);\n\n AVChapter **AvChapter = 
AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->time_base = Timebase;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->start;\n}\n\nExpect AVChapterSetStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n int64_t StartValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->start = StartValue;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->end;\n}\n\nExpect AVChapterSetEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t EndValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->end = EndValue;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterMetadata::body(const Runtime::CallingFrame &Frame,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, uint32_t DictPtr) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(DictId, MemInst, uint32_t, DictPtr,\n \"Failed when accessing the return AVDictionary memory\"sv);\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n\n AVDictionary **AvDictionary =\n static_cast(av_malloc(sizeof(AVDictionary *)));\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n *AvDictionary = (*AvChapter)->metadata;\n FFMPEG_PTR_STORE(AvDictionary, DictId);\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetMetadata::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n uint32_t DictId) {\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n FFMPEG_PTR_FETCH(AvDictionary, DictId, AVDictionary *);\n\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n if (AvDictionary == nullptr)\n (*AvChapter)->metadata = nullptr;\n else\n (*AvChapter)->metadata = *AvDictionary;\n return static_cast(ErrNo::Success);\n}\n\n} // namespace AVFormat\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avformat/avChapter.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 10, "source": 
"codebases/jsonl"}, "chunks": [{"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 0, "content": "#include \"avChapter.h\"\n\nextern \"C\" {\n#include \"libavformat/avformat.h\"\n}\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasmEdgeFFmpeg {\nnamespace AVFormat {\n\nExpect AVChapterId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId, uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->id;\n}\n\nExpect AVChapterSetId::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t ChapterId) {\n\n", "meta": {"hash_id": "8e7c835ae9109d36276ac8272252517a06d060617fac7daf4753bf19a7fd6a6f"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 1, "content": " FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->id = ChapterId;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterTimebase::body(const Runtime::CallingFrame &Frame,\n uint32_t NumPtr, uint32_t DenPtr,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(Num, MemInst, int32_t, NumPtr, \"\");\n MEM_PTR_CHECK(Den, MemInst, int32_t, DenPtr, \"\");\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n", "meta": {"hash_id": "336aee6912cc01096560bdb025f3a715d2c9ac1e81027e7db4d5f9aa1f5cd3d9"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 2, "content": " AVRational const AvRational = static_cast(*AvChapter)->time_base;\n *Num = AvRational.num;\n *Den = AvRational.den;\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetTimebase::body(const Runtime::CallingFrame &,\n int32_t Num, int32_t Den,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVRational const Timebase = av_make_q(Num, Den);\n\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->time_base = Timebase;\n return static_cast(ErrNo::Success);\n}\n\n", "meta": {"hash_id": "3ec45beff37ecbeb3fd4b36ea534cdc289ad6211ed3c2b86f3fdda0b53941218"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 3, "content": "Expect AVChapterStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->start;\n}\n\n", "meta": {"hash_id": "33387addd6c661642b6554ea66741a5eda732c4d14cdb28dd228c5414708b6f3"}}, {"doc_uuid": 
"0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 4, "content": "Expect AVChapterSetStart::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n int64_t StartValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->start = StartValue;\n return static_cast(ErrNo::Success);\n}\n\n", "meta": {"hash_id": "a2865ca951c56f5a3185d1fb0be92dab699552fb37ee68edea862960956d9490"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 5, "content": "Expect AVChapterEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n return static_cast(*AvChapter)->end;\n}\n\n", "meta": {"hash_id": "765279676927a00a61e19396704adf132ba731f6a21f0458b37878e4bdd698bd"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 6, "content": "Expect AVChapterSetEnd::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, int64_t EndValue) {\n\n FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n AVChapter **AvChapter = AvFormatContext->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n (*AvChapter)->end = EndValue;\n return static_cast(ErrNo::Success);\n}\n\n", "meta": {"hash_id": "570d96a04595ade8239dc6c88c54ad6be96fe418bb90c060d22ec5ee5e826513"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 7, "content": "Expect AVChapterMetadata::body(const Runtime::CallingFrame &Frame,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx, uint32_t DictPtr) {\n\n MEMINST_CHECK(MemInst, Frame, 0);\n MEM_PTR_CHECK(DictId, MemInst, uint32_t, DictPtr,\n \"Failed when accessing the return AVDictionary memory\"sv);\n\n FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n\n AVDictionary **AvDictionary =\n static_cast(av_malloc(sizeof(AVDictionary *)));\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n", "meta": {"hash_id": "95c5b801159cf0c29acd450b650327965a9479fc4f08feb5a20f1bc149b68dbd"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 8, "content": " // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n AvChapter++;\n\n *AvDictionary = (*AvChapter)->metadata;\n FFMPEG_PTR_STORE(AvDictionary, DictId);\n return static_cast(ErrNo::Success);\n}\n\nExpect AVChapterSetMetadata::body(const Runtime::CallingFrame &,\n uint32_t AvFormatCtxId,\n uint32_t ChapterIdx,\n uint32_t DictId) {\n\n", "meta": {"hash_id": "2816a8cc46322b8036b2a41f884b6ffe072862c5bb8c4fff5286b16e1ddd0cf2"}}, {"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 9, "content": " FFMPEG_PTR_FETCH(AvFormatCtx, AvFormatCtxId, AVFormatContext);\n FFMPEG_PTR_FETCH(AvDictionary, DictId, AVDictionary *);\n\n AVChapter **AvChapter = AvFormatCtx->chapters;\n\n // No check here (Check)\n // Raw Pointer Iteration.\n for (unsigned int I = 1; I <= ChapterIdx; I++)\n 
AvChapter++;\n\n  if (AvDictionary == nullptr)\n    (*AvChapter)->metadata = nullptr;\n  else\n    (*AvChapter)->metadata = *AvDictionary;\n  return static_cast<int32_t>(ErrNo::Success);\n}\n\n} // namespace AVFormat\n} // namespace WasmEdgeFFmpeg\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "4cb8b7cab550919a8df7865ebdb4393fadbeb796c3f0e484bff845548b26e252"}}]}], "golden_chunks": [{"doc_uuid": "0396205d1c4798d535adf488d31a205be75de55cc26b35efd5312ff423f7e7f4", "index": 3, "content": "Expect<int64_t> AVChapterStart::body(const Runtime::CallingFrame &,\n                                     uint32_t AvFormatCtxId,\n                                     uint32_t ChapterIdx) {\n\n  FFMPEG_PTR_FETCH(AvFormatContext, AvFormatCtxId, AVFormatContext);\n  AVChapter **AvChapter = AvFormatContext->chapters;\n\n  // No check here (Check)\n  // Raw Pointer Iteration.\n  for (unsigned int I = 1; I <= ChapterIdx; I++)\n    AvChapter++;\n\n  return static_cast(*AvChapter)->start;\n}\n\n", "meta": {"hash_id": "33387addd6c661642b6554ea66741a5eda732c4d14cdb28dd228c5414708b6f3"}}], "meta": {"doctype": "codebase", "relative_path": "/plugins/wasmedge_ffmpeg/avformat/avChapter.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `importPk` function?", "answer": "The `importPk` function is used to import a public key from an encoded representation. It takes the algorithm type (`AsymmetricCommon::Algorithm`), the encoded public key data (`Span<const uint8_t> Encoded`), and the encoding format (`__wasi_publickey_encoding_e_t Encoding`) as parameters. It uses `std::visit` to call the appropriate `PublicKey::import` function based on the algorithm type.", "golden_doc_uuids": ["ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26"], "golden_chunk_uuids": [["ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26", 0]], "golden_documents": [{"uuid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"asymmetric_common/publickey.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace AsymmetricCommon {\n\nWasiCryptoExpect<PkVariant>\nimportPk(AsymmetricCommon::Algorithm Alg, Span<const uint8_t> 
Encoded,\n         __wasi_publickey_encoding_e_t Encoding) noexcept {\n  return std::visit(\n      [=](auto Factory) noexcept -> WasiCryptoExpect<PkVariant> {\n        return decltype(Factory)::PublicKey::import(Encoded, Encoding);\n      },\n      Alg);\n}\n\n", "meta": {"hash_id": "b8a5784c3544520175a542097709bfd8119a716ed2e859d6b79b9195b315c071"}}, {"doc_uuid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26", "index": 1, "content": "WasiCryptoExpect<std::vector<uint8_t>>\npkExportData(const PkVariant &PkVariant,\n             __wasi_publickey_encoding_e_t Encoding) noexcept {\n  return std::visit(\n      [Encoding](const auto &Pk) noexcept { return Pk.exportData(Encoding); },\n      PkVariant);\n}\n\nWasiCryptoExpect<void> pkVerify(const PkVariant &PkVariant) noexcept {\n  return std::visit([](const auto &Pk) noexcept { return Pk.verify(); },\n                    PkVariant);\n}\n\n} // namespace AsymmetricCommon\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "6d22eeee84b7c506b98797f40d499baf9280fc2f7b625c8addeea55f8a6cdef8"}}]}], "golden_chunks": [{"doc_uuid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"asymmetric_common/publickey.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace AsymmetricCommon {\n\nWasiCryptoExpect<PkVariant>\nimportPk(AsymmetricCommon::Algorithm Alg, Span<const uint8_t> Encoded,\n         __wasi_publickey_encoding_e_t Encoding) noexcept {\n  return std::visit(\n      [=](auto Factory) noexcept -> WasiCryptoExpect<PkVariant> {\n        return decltype(Factory)::PublicKey::import(Encoded, Encoding);\n      },\n      Alg);\n}\n\n", "meta": {"hash_id": "b8a5784c3544520175a542097709bfd8119a716ed2e859d6b79b9195b315c071"}}], "meta": {"doctype": "codebase", "relative_path": "/plugins/wasi_crypto/asymmetric_common/publickey.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How does the `pkExportData` function work?", "answer": "The `pkExportData` function is used to export the public key data in a specified encoding format. It takes the public key variant (`const PkVariant &PkVariant`) and the desired encoding format (`__wasi_publickey_encoding_e_t Encoding`) as parameters. It uses `std::visit` to call the `exportData` function of the appropriate public key type based on the variant. 
The function returns the exported public key data as a `std::vector<uint8_t>`.", "golden_doc_uuids": ["ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26"], "golden_chunk_uuids": [["ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26", 1]], "golden_documents": [{"uuid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26", "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"asymmetric_common/publickey.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace AsymmetricCommon {\n\nWasiCryptoExpect<PkVariant>\nimportPk(AsymmetricCommon::Algorithm Alg, Span<const uint8_t> Encoded,\n         __wasi_publickey_encoding_e_t Encoding) noexcept {\n  return std::visit(\n      [=](auto Factory) noexcept -> WasiCryptoExpect<PkVariant> {\n        return decltype(Factory)::PublicKey::import(Encoded, Encoding);\n      },\n      Alg);\n}\n\nWasiCryptoExpect<std::vector<uint8_t>>\npkExportData(const PkVariant &PkVariant,\n             __wasi_publickey_encoding_e_t Encoding) noexcept {\n  return std::visit(\n      [Encoding](const auto &Pk) noexcept { return Pk.exportData(Encoding); },\n      PkVariant);\n}\n\nWasiCryptoExpect<void> pkVerify(const PkVariant &PkVariant) noexcept {\n  return std::visit([](const auto &Pk) noexcept { return Pk.verify(); },\n                    PkVariant);\n}\n\n} // namespace AsymmetricCommon\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"doctype": "codebase", "relative_path": "/plugins/wasi_crypto/asymmetric_common/publickey.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26", "index": 0, "content": "// SPDX-License-Identifier: Apache-2.0\n// SPDX-FileCopyrightText: 2019-2022 Second State INC\n\n#include \"asymmetric_common/publickey.h\"\n\nnamespace WasmEdge {\nnamespace Host {\nnamespace WasiCrypto {\nnamespace AsymmetricCommon {\n\nWasiCryptoExpect<PkVariant>\nimportPk(AsymmetricCommon::Algorithm Alg, Span<const uint8_t> Encoded,\n         __wasi_publickey_encoding_e_t Encoding) noexcept {\n  return std::visit(\n      [=](auto Factory) noexcept -> WasiCryptoExpect<PkVariant> {\n        return decltype(Factory)::PublicKey::import(Encoded, Encoding);\n      },\n      Alg);\n}\n\n", "meta": {"hash_id": "b8a5784c3544520175a542097709bfd8119a716ed2e859d6b79b9195b315c071"}}, {"doc_uuid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26", "index": 1, "content": "WasiCryptoExpect<std::vector<uint8_t>>\npkExportData(const PkVariant &PkVariant,\n             __wasi_publickey_encoding_e_t Encoding) noexcept {\n  return std::visit(\n      [Encoding](const auto &Pk) noexcept { return Pk.exportData(Encoding); },\n      PkVariant);\n}\n\nWasiCryptoExpect<void> pkVerify(const PkVariant &PkVariant) noexcept {\n  return std::visit([](const auto &Pk) noexcept { return Pk.verify(); },\n                    PkVariant);\n}\n\n} // namespace AsymmetricCommon\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "6d22eeee84b7c506b98797f40d499baf9280fc2f7b625c8addeea55f8a6cdef8"}}]}], "golden_chunks": [{"doc_uuid": "ed7a7ce904aa052de3a265fed9df5de8666341480bdf203a7e2f1b8761ea0c26", "index": 1, "content": "WasiCryptoExpect<std::vector<uint8_t>>\npkExportData(const PkVariant &PkVariant,\n             __wasi_publickey_encoding_e_t Encoding) noexcept {\n  return std::visit(\n      [Encoding](const auto &Pk) noexcept { return Pk.exportData(Encoding); },\n      PkVariant);\n}\n\nWasiCryptoExpect<void> pkVerify(const PkVariant &PkVariant) noexcept {\n  return std::visit([](const auto &Pk) noexcept { return Pk.verify(); },\n                    PkVariant);\n}\n\n} // 
namespace AsymmetricCommon\n} // namespace WasiCrypto\n} // namespace Host\n} // namespace WasmEdge\n", "meta": {"hash_id": "6d22eeee84b7c506b98797f40d499baf9280fc2f7b625c8addeea55f8a6cdef8"}}], "meta": {"doctype": "codebase", "relative_path": "/plugins/wasi_crypto/asymmetric_common/publickey.cpp", "repo_name": "WasmEdge/WasmEdge", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How do you create a new instance of the `Pty` struct?", "answer": "There are two ways to create a new instance of the `Pty` struct:\n1. Using the `new` function in the `pty` module:\n```rust\npub fn new(config: &Options, window_size: WindowSize, _window_id: u64) -> Result<Pty> {\n    conpty::new(config, window_size)\n        .ok_or_else(|| Error::new(ErrorKind::Other, \"failed to spawn conpty\"))\n}\n```\nThis function takes a configuration (`Options`), window size (`WindowSize`), and a window ID (unused), and returns a `Result<Pty>`. It internally calls `conpty::new` to create the pty.\n\n2. Using the `new` method of the `Pty` struct:\n```rust\nfn new(\n    backend: impl Into<Backend>,\n    conout: impl Into<ReadPipe>,\n    conin: impl Into<WritePipe>,\n    child_watcher: ChildExitWatcher,\n) -> Self {\n    Self { backend: backend.into(), conout: conout.into(), conin: conin.into(), child_watcher }\n}\n```\nThis method takes a backend, output pipe, input pipe, and a child process watcher, and returns a new instance of `Pty`.", "golden_doc_uuids": ["8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94"], "golden_chunk_uuids": [["8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", 1]], "golden_documents": [{"uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "content": "use std::ffi::OsStr;\nuse std::io::{self, Error, ErrorKind, Result};\nuse std::iter::once;\nuse std::os::windows::ffi::OsStrExt;\nuse std::sync::mpsc::TryRecvError;\nuse std::sync::Arc;\n\nuse crate::event::{OnResize, WindowSize};\nuse crate::tty::windows::child::ChildExitWatcher;\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options, Shell};\n\nmod blocking;\nmod child;\nmod conpty;\n\nuse blocking::{UnblockedReader, UnblockedWriter};\nuse conpty::Conpty as Backend;\nuse miow::pipe::{AnonRead, AnonWrite};\nuse polling::{Event, Poller};\n\npub const PTY_CHILD_EVENT_TOKEN: usize = 1;\npub const PTY_READ_WRITE_TOKEN: usize = 2;\n\ntype ReadPipe = UnblockedReader<AnonRead>;\ntype WritePipe = UnblockedWriter<AnonWrite>;\n\npub struct Pty {\n    // XXX: Backend is required to be the first field, to ensure correct drop order. 
Dropping\n    // `conout` before `backend` will cause a deadlock (with Conpty).\n    backend: Backend,\n    conout: ReadPipe,\n    conin: WritePipe,\n    child_watcher: ChildExitWatcher,\n}\n\npub fn new(config: &Options, window_size: WindowSize, _window_id: u64) -> Result<Pty> {\n    conpty::new(config, window_size)\n        .ok_or_else(|| Error::new(ErrorKind::Other, \"failed to spawn conpty\"))\n}\n\nimpl Pty {\n    fn new(\n        backend: impl Into<Backend>,\n        conout: impl Into<ReadPipe>,\n        conin: impl Into<WritePipe>,\n        child_watcher: ChildExitWatcher,\n    ) -> Self {\n        Self { backend: backend.into(), conout: conout.into(), conin: conin.into(), child_watcher }\n    }\n\n    pub fn child_watcher(&self) -> &ChildExitWatcher {\n        &self.child_watcher\n    }\n}\n\nfn with_key(mut event: Event, key: usize) -> Event {\n    event.key = key;\n    event\n}\n\nimpl EventedReadWrite for Pty {\n    type Reader = ReadPipe;\n    type Writer = WritePipe;\n\n    #[inline]\n    unsafe fn register(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n        Ok(())\n    }\n\n    #[inline]\n    fn reregister(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n        Ok(())\n    }\n\n    #[inline]\n    fn deregister(&mut self, _poll: &Arc<Poller>) -> io::Result<()> {\n        self.conin.deregister();\n        self.conout.deregister();\n        self.child_watcher.deregister();\n\n        Ok(())\n    }\n\n    #[inline]\n    fn reader(&mut self) -> &mut Self::Reader {\n        &mut self.conout\n    }\n\n    #[inline]\n    fn writer(&mut self) -> &mut Self::Writer {\n        &mut self.conin\n    }\n}\n\nimpl EventedPty for Pty {\n    fn next_child_event(&mut self) -> Option<ChildEvent> {\n        match self.child_watcher.event_rx().try_recv() {\n            Ok(ev) => Some(ev),\n            Err(TryRecvError::Empty) => None,\n            Err(TryRecvError::Disconnected) => Some(ChildEvent::Exited(None)),\n        }\n    }\n}\n\nimpl OnResize for Pty {\n    fn on_resize(&mut self, window_size: WindowSize) {\n        self.backend.on_resize(window_size)\n    }\n}\n\nfn cmdline(config: &Options) -> String {\n    let default_shell = Shell::new(\"powershell\".to_owned(), Vec::new());\n    let shell = config.shell.as_ref().unwrap_or(&default_shell);\n\n    once(shell.program.as_str())\n        .chain(shell.args.iter().map(|s| s.as_str()))\n        .collect::<Vec<_>>()\n        .join(\" \")\n}\n\n/// Converts the string slice into a Windows-standard representation for \"W\"-\n/// suffixed function variants, which accept UTF-16 encoded string values.\npub fn win32_string<S: AsRef<OsStr> + ?Sized>(value: &S) -> Vec<u16> {\n    OsStr::new(value).encode_wide().chain(once(0)).collect()\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/tty/windows/mod.rs", "repo_name": "alacritty/alacritty", "num_chunks": 8, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 0, "content": "use std::ffi::OsStr;\nuse std::io::{self, Error, ErrorKind, Result};\nuse std::iter::once;\nuse std::os::windows::ffi::OsStrExt;\nuse std::sync::mpsc::TryRecvError;\nuse std::sync::Arc;\n\nuse crate::event::{OnResize, WindowSize};\nuse 
crate::tty::windows::child::ChildExitWatcher;\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options, Shell};\n\nmod blocking;\nmod child;\nmod conpty;\n\nuse blocking::{UnblockedReader, UnblockedWriter};\nuse conpty::Conpty as Backend;\nuse miow::pipe::{AnonRead, AnonWrite};\nuse polling::{Event, Poller};\n\npub const PTY_CHILD_EVENT_TOKEN: usize = 1;\npub const PTY_READ_WRITE_TOKEN: usize = 2;\n\ntype ReadPipe = UnblockedReader<AnonRead>;\ntype WritePipe = UnblockedWriter<AnonWrite>;\n\n", "meta": {"hash_id": "c4dfe9598b1b0d265cd07715db093cb2dde57d95421695f27d71e954df813e40"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 1, "content": "pub struct Pty {\n    // XXX: Backend is required to be the first field, to ensure correct drop order. Dropping\n    // `conout` before `backend` will cause a deadlock (with Conpty).\n    backend: Backend,\n    conout: ReadPipe,\n    conin: WritePipe,\n    child_watcher: ChildExitWatcher,\n}\n\npub fn new(config: &Options, window_size: WindowSize, _window_id: u64) -> Result<Pty> {\n    conpty::new(config, window_size)\n        .ok_or_else(|| Error::new(ErrorKind::Other, \"failed to spawn conpty\"))\n}\n\n", "meta": {"hash_id": "41126b1f1e79a84d6829af425821739e357da0d70e44a5882e513e11c74abb2f"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 2, "content": "impl Pty {\n    fn new(\n        backend: impl Into<Backend>,\n        conout: impl Into<ReadPipe>,\n        conin: impl Into<WritePipe>,\n        child_watcher: ChildExitWatcher,\n    ) -> Self {\n        Self { backend: backend.into(), conout: conout.into(), conin: conin.into(), child_watcher }\n    }\n\n    pub fn child_watcher(&self) -> &ChildExitWatcher {\n        &self.child_watcher\n    }\n}\n\nfn with_key(mut event: Event, key: usize) -> Event {\n    event.key = key;\n    event\n}\n\nimpl EventedReadWrite for Pty {\n    type Reader = ReadPipe;\n    type Writer = WritePipe;\n\n", "meta": {"hash_id": "c5ee0946a6b3dd81198a27892aed1620e5abe2fe04fb51fff71e9daf25f1da74"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 3, "content": "    #[inline]\n    unsafe fn register(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n", "meta": {"hash_id": "8a91bcbab9c661df0689ca9ee38e3ec3172666c791d7560d5bd2b97409cdc9c6"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 4, "content": "        Ok(())\n    }\n\n    #[inline]\n    fn reregister(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n", "meta": {"hash_id": "5ee314f555663730e068647aa3c9ec04f5ca137c3788bbee9e221664deef2d20"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 5, "content": "        Ok(())\n    }\n\n    #[inline]\n    fn deregister(&mut self, _poll: &Arc<Poller>) -> io::Result<()> {\n        self.conin.deregister();\n        self.conout.deregister();\n        self.child_watcher.deregister();\n\n        Ok(())\n    }\n\n    #[inline]\n    fn reader(&mut self) -> &mut Self::Reader {\n        &mut self.conout\n    }\n\n    #[inline]\n    fn writer(&mut self) -> 
&mut Self::Writer {\n        &mut self.conin\n    }\n}\n\n", "meta": {"hash_id": "c298b55b09cce72347906751e7e639ec12bad59ee6fd9f940248f16005505b9a"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 6, "content": "impl EventedPty for Pty {\n    fn next_child_event(&mut self) -> Option<ChildEvent> {\n        match self.child_watcher.event_rx().try_recv() {\n            Ok(ev) => Some(ev),\n            Err(TryRecvError::Empty) => None,\n            Err(TryRecvError::Disconnected) => Some(ChildEvent::Exited(None)),\n        }\n    }\n}\n\nimpl OnResize for Pty {\n    fn on_resize(&mut self, window_size: WindowSize) {\n        self.backend.on_resize(window_size)\n    }\n}\n\nfn cmdline(config: &Options) -> String {\n    let default_shell = Shell::new(\"powershell\".to_owned(), Vec::new());\n    let shell = config.shell.as_ref().unwrap_or(&default_shell);\n\n", "meta": {"hash_id": "beb10893d1626de0b46db436bb55c9468b8f02553d95a80ed39ead448daa4288"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 7, "content": "    once(shell.program.as_str())\n        .chain(shell.args.iter().map(|s| s.as_str()))\n        .collect::<Vec<_>>()\n        .join(\" \")\n}\n\n/// Converts the string slice into a Windows-standard representation for \"W\"-\n/// suffixed function variants, which accept UTF-16 encoded string values.\npub fn win32_string<S: AsRef<OsStr> + ?Sized>(value: &S) -> Vec<u16> {\n    OsStr::new(value).encode_wide().chain(once(0)).collect()\n}\n", "meta": {"hash_id": "4e20ac0eb1834a2f07ce4ec07c272fcb9da959d899fa3924b8c565c661681634"}}]}], "golden_chunks": [{"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 1, "content": "pub struct Pty {\n    // XXX: Backend is required to be the first field, to ensure correct drop order. Dropping\n    // `conout` before `backend` will cause a deadlock (with Conpty).\n    backend: Backend,\n    conout: ReadPipe,\n    conin: WritePipe,\n    child_watcher: ChildExitWatcher,\n}\n\npub fn new(config: &Options, window_size: WindowSize, _window_id: u64) -> Result<Pty> {\n    conpty::new(config, window_size)\n        .ok_or_else(|| Error::new(ErrorKind::Other, \"failed to spawn conpty\"))\n}\n\n", "meta": {"hash_id": "41126b1f1e79a84d6829af425821739e357da0d70e44a5882e513e11c74abb2f"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/tty/windows/mod.rs", "repo_name": "alacritty/alacritty", "num_chunks": 8, "source": "codebases/jsonl"}} +{"query": "How can you access the child process watcher associated with a `Pty` instance?", "answer": "You can access the child process watcher associated with a `Pty` instance using the `child_watcher` method:\n```rust\npub fn child_watcher(&self) -> &ChildExitWatcher {\n    &self.child_watcher\n}\n```\nThis method returns a reference to the `ChildExitWatcher` stored in the `Pty` struct.", "golden_doc_uuids": ["8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94"], "golden_chunk_uuids": [["8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", 2]], "golden_documents": [{"uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "content": "use std::ffi::OsStr;\nuse std::io::{self, Error, ErrorKind, Result};\nuse std::iter::once;\nuse std::os::windows::ffi::OsStrExt;\nuse std::sync::mpsc::TryRecvError;\nuse std::sync::Arc;\n\nuse crate::event::{OnResize, WindowSize};\nuse crate::tty::windows::child::ChildExitWatcher;\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options, Shell};\n\nmod blocking;\nmod child;\nmod conpty;\n\nuse blocking::{UnblockedReader, UnblockedWriter};\nuse conpty::Conpty as Backend;\nuse 
miow::pipe::{AnonRead, AnonWrite};\nuse polling::{Event, Poller};\n\npub const PTY_CHILD_EVENT_TOKEN: usize = 1;\npub const PTY_READ_WRITE_TOKEN: usize = 2;\n\ntype ReadPipe = UnblockedReader<AnonRead>;\ntype WritePipe = UnblockedWriter<AnonWrite>;\n\npub struct Pty {\n    // XXX: Backend is required to be the first field, to ensure correct drop order. Dropping\n    // `conout` before `backend` will cause a deadlock (with Conpty).\n    backend: Backend,\n    conout: ReadPipe,\n    conin: WritePipe,\n    child_watcher: ChildExitWatcher,\n}\n\npub fn new(config: &Options, window_size: WindowSize, _window_id: u64) -> Result<Pty> {\n    conpty::new(config, window_size)\n        .ok_or_else(|| Error::new(ErrorKind::Other, \"failed to spawn conpty\"))\n}\n\nimpl Pty {\n    fn new(\n        backend: impl Into<Backend>,\n        conout: impl Into<ReadPipe>,\n        conin: impl Into<WritePipe>,\n        child_watcher: ChildExitWatcher,\n    ) -> Self {\n        Self { backend: backend.into(), conout: conout.into(), conin: conin.into(), child_watcher }\n    }\n\n    pub fn child_watcher(&self) -> &ChildExitWatcher {\n        &self.child_watcher\n    }\n}\n\nfn with_key(mut event: Event, key: usize) -> Event {\n    event.key = key;\n    event\n}\n\nimpl EventedReadWrite for Pty {\n    type Reader = ReadPipe;\n    type Writer = WritePipe;\n\n    #[inline]\n    unsafe fn register(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n        Ok(())\n    }\n\n    #[inline]\n    fn reregister(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n        Ok(())\n    }\n\n    #[inline]\n    fn deregister(&mut self, _poll: &Arc<Poller>) -> io::Result<()> {\n        self.conin.deregister();\n        self.conout.deregister();\n        self.child_watcher.deregister();\n\n        Ok(())\n    }\n\n    #[inline]\n    fn reader(&mut self) -> &mut Self::Reader {\n        &mut self.conout\n    }\n\n    #[inline]\n    fn writer(&mut self) -> &mut Self::Writer {\n        &mut self.conin\n    }\n}\n\nimpl EventedPty for Pty {\n    fn next_child_event(&mut self) -> Option<ChildEvent> {\n        match self.child_watcher.event_rx().try_recv() {\n            Ok(ev) => Some(ev),\n            Err(TryRecvError::Empty) => None,\n            Err(TryRecvError::Disconnected) => Some(ChildEvent::Exited(None)),\n        }\n    }\n}\n\nimpl OnResize for Pty {\n    fn on_resize(&mut self, window_size: WindowSize) {\n        self.backend.on_resize(window_size)\n    }\n}\n\nfn cmdline(config: &Options) -> String {\n    let default_shell = Shell::new(\"powershell\".to_owned(), Vec::new());\n    let shell = config.shell.as_ref().unwrap_or(&default_shell);\n\n    once(shell.program.as_str())\n        .chain(shell.args.iter().map(|s| s.as_str()))\n        .collect::<Vec<_>>()\n        .join(\" \")\n}\n\n/// Converts the string slice into a Windows-standard representation for \"W\"-\n/// suffixed function variants, which accept UTF-16 encoded string values.\npub fn win32_string<S: AsRef<OsStr> + ?Sized>(value: &S) -> Vec<u16> {\n    OsStr::new(value).encode_wide().chain(once(0)).collect()\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/tty/windows/mod.rs", "repo_name": "alacritty/alacritty", "num_chunks": 8, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": 
"8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 0, "content": "use std::ffi::OsStr;\nuse std::io::{self, Error, ErrorKind, Result};\nuse std::iter::once;\nuse std::os::windows::ffi::OsStrExt;\nuse std::sync::mpsc::TryRecvError;\nuse std::sync::Arc;\n\nuse crate::event::{OnResize, WindowSize};\nuse crate::tty::windows::child::ChildExitWatcher;\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options, Shell};\n\nmod blocking;\nmod child;\nmod conpty;\n\nuse blocking::{UnblockedReader, UnblockedWriter};\nuse conpty::Conpty as Backend;\nuse miow::pipe::{AnonRead, AnonWrite};\nuse polling::{Event, Poller};\n\npub const PTY_CHILD_EVENT_TOKEN: usize = 1;\npub const PTY_READ_WRITE_TOKEN: usize = 2;\n\ntype ReadPipe = UnblockedReader<AnonRead>;\ntype WritePipe = UnblockedWriter<AnonWrite>;\n\n", "meta": {"hash_id": "c4dfe9598b1b0d265cd07715db093cb2dde57d95421695f27d71e954df813e40"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 1, "content": "pub struct Pty {\n    // XXX: Backend is required to be the first field, to ensure correct drop order. Dropping\n    // `conout` before `backend` will cause a deadlock (with Conpty).\n    backend: Backend,\n    conout: ReadPipe,\n    conin: WritePipe,\n    child_watcher: ChildExitWatcher,\n}\n\npub fn new(config: &Options, window_size: WindowSize, _window_id: u64) -> Result<Pty> {\n    conpty::new(config, window_size)\n        .ok_or_else(|| Error::new(ErrorKind::Other, \"failed to spawn conpty\"))\n}\n\n", "meta": {"hash_id": "41126b1f1e79a84d6829af425821739e357da0d70e44a5882e513e11c74abb2f"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 2, "content": "impl Pty {\n    fn new(\n        backend: impl Into<Backend>,\n        conout: impl Into<ReadPipe>,\n        conin: impl Into<WritePipe>,\n        child_watcher: ChildExitWatcher,\n    ) -> Self {\n        Self { backend: backend.into(), conout: conout.into(), conin: conin.into(), child_watcher }\n    }\n\n    pub fn child_watcher(&self) -> &ChildExitWatcher {\n        &self.child_watcher\n    }\n}\n\nfn with_key(mut event: Event, key: usize) -> Event {\n    event.key = key;\n    event\n}\n\nimpl EventedReadWrite for Pty {\n    type Reader = ReadPipe;\n    type Writer = WritePipe;\n\n", "meta": {"hash_id": "c5ee0946a6b3dd81198a27892aed1620e5abe2fe04fb51fff71e9daf25f1da74"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 3, "content": "    #[inline]\n    unsafe fn register(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n", "meta": {"hash_id": "8a91bcbab9c661df0689ca9ee38e3ec3172666c791d7560d5bd2b97409cdc9c6"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 4, "content": "        Ok(())\n    }\n\n    #[inline]\n    fn reregister(\n        &mut self,\n        poll: &Arc<Poller>,\n        interest: polling::Event,\n        poll_opts: polling::PollMode,\n    ) -> io::Result<()> {\n        self.conin.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.conout.register(poll, with_key(interest, PTY_READ_WRITE_TOKEN), poll_opts);\n        self.child_watcher.register(poll, with_key(interest, PTY_CHILD_EVENT_TOKEN));\n\n", "meta": {"hash_id": "5ee314f555663730e068647aa3c9ec04f5ca137c3788bbee9e221664deef2d20"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 5, 
"content": "        Ok(())\n    }\n\n    #[inline]\n    fn deregister(&mut self, _poll: &Arc<Poller>) -> io::Result<()> {\n        self.conin.deregister();\n        self.conout.deregister();\n        self.child_watcher.deregister();\n\n        Ok(())\n    }\n\n    #[inline]\n    fn reader(&mut self) -> &mut Self::Reader {\n        &mut self.conout\n    }\n\n    #[inline]\n    fn writer(&mut self) -> &mut Self::Writer {\n        &mut self.conin\n    }\n}\n\n", "meta": {"hash_id": "c298b55b09cce72347906751e7e639ec12bad59ee6fd9f940248f16005505b9a"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 6, "content": "impl EventedPty for Pty {\n    fn next_child_event(&mut self) -> Option<ChildEvent> {\n        match self.child_watcher.event_rx().try_recv() {\n            Ok(ev) => Some(ev),\n            Err(TryRecvError::Empty) => None,\n            Err(TryRecvError::Disconnected) => Some(ChildEvent::Exited(None)),\n        }\n    }\n}\n\nimpl OnResize for Pty {\n    fn on_resize(&mut self, window_size: WindowSize) {\n        self.backend.on_resize(window_size)\n    }\n}\n\nfn cmdline(config: &Options) -> String {\n    let default_shell = Shell::new(\"powershell\".to_owned(), Vec::new());\n    let shell = config.shell.as_ref().unwrap_or(&default_shell);\n\n", "meta": {"hash_id": "beb10893d1626de0b46db436bb55c9468b8f02553d95a80ed39ead448daa4288"}}, {"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 7, "content": "    once(shell.program.as_str())\n        .chain(shell.args.iter().map(|s| s.as_str()))\n        .collect::<Vec<_>>()\n        .join(\" \")\n}\n\n/// Converts the string slice into a Windows-standard representation for \"W\"-\n/// suffixed function variants, which accept UTF-16 encoded string values.\npub fn win32_string<S: AsRef<OsStr> + ?Sized>(value: &S) -> Vec<u16> {\n    OsStr::new(value).encode_wide().chain(once(0)).collect()\n}\n", "meta": {"hash_id": "4e20ac0eb1834a2f07ce4ec07c272fcb9da959d899fa3924b8c565c661681634"}}]}], "golden_chunks": [{"doc_uuid": "8c07c6723715401237471af50bc77e924d3ae094fd1f689aafa60b9845809d94", "index": 2, "content": "impl Pty {\n    fn new(\n        backend: impl Into<Backend>,\n        conout: impl Into<ReadPipe>,\n        conin: impl Into<WritePipe>,\n        child_watcher: ChildExitWatcher,\n    ) -> Self {\n        Self { backend: backend.into(), conout: conout.into(), conin: conin.into(), child_watcher }\n    }\n\n    pub fn child_watcher(&self) -> &ChildExitWatcher {\n        &self.child_watcher\n    }\n}\n\nfn with_key(mut event: Event, key: usize) -> Event {\n    event.key = key;\n    event\n}\n\nimpl EventedReadWrite for Pty {\n    type Reader = ReadPipe;\n    type Writer = WritePipe;\n\n", "meta": {"hash_id": "c5ee0946a6b3dd81198a27892aed1620e5abe2fe04fb51fff71e9daf25f1da74"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/tty/windows/mod.rs", "repo_name": "alacritty/alacritty", "num_chunks": 8, "source": "codebases/jsonl"}} +{"query": "What does the `merge` function do?", "answer": "The `merge` function takes two `Value` instances (`base` and `replacement`) from the `toml` crate and merges them together. It returns a new `Value` that combines the values from both inputs, favoring `replacement` values whenever there is a conflict. This can be seen in the match expression:\n```rust\nmatch (base, replacement) {\n    (Value::Array(mut base), Value::Array(mut replacement)) => {\n        base.append(&mut replacement);\n        Value::Array(base)\n    },\n    (Value::Table(base), Value::Table(replacement)) => {\n        Value::Table(merge_tables(base, replacement))\n    },\n    (_, value) => value,\n}\n```\nIf both values are arrays, it appends the `replacement` array to the `base` array. If both are tables (key-value maps), it calls `merge_tables` to recursively merge them. 
Otherwise, it simply returns the `replacement` value.", "golden_doc_uuids": ["63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc"], "golden_chunk_uuids": [["63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", 0]], "golden_documents": [{"uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "content": "//! Serde helpers.\n\nuse toml::{Table, Value};\n\n/// Merge two serde structures.\n///\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n/// `replacement`.\npub fn merge(base: Value, replacement: Value) -> Value {\n match (base, replacement) {\n (Value::Array(mut base), Value::Array(mut replacement)) => {\n base.append(&mut replacement);\n Value::Array(base)\n },\n (Value::Table(base), Value::Table(replacement)) => {\n Value::Table(merge_tables(base, replacement))\n },\n (_, value) => value,\n }\n}\n\n/// Merge two key/value tables.\nfn merge_tables(mut base: Table, replacement: Table) -> Table {\n for (key, value) in replacement {\n let value = match base.remove(&key) {\n Some(base_value) => merge(base_value, value),\n None => value,\n };\n base.insert(key, value);\n }\n\n base\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn merge_primitive() {\n let base = Value::Table(Table::new());\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Boolean(false);\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Integer(0.into());\n let replacement = Value::Integer(1.into());\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::String(String::new());\n let replacement = Value::String(String::from(\"test\"));\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Table(Table::new());\n let replacement = Value::Table(Table::new());\n assert_eq!(merge(base.clone(), replacement), base);\n }\n\n #[test]\n fn merge_sequence() {\n let base = Value::Array(vec![Value::Table(Table::new())]);\n let replacement = Value::Array(vec![Value::Boolean(true)]);\n let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);\n assert_eq!(merge(base, replacement), expected);\n }\n\n #[test]\n fn merge_tables() {\n let mut base_table = Table::new();\n base_table.insert(String::from(\"a\"), Value::Boolean(true));\n base_table.insert(String::from(\"b\"), Value::Boolean(false));\n let base = Value::Table(base_table);\n\n let mut replacement_table = Table::new();\n replacement_table.insert(String::from(\"a\"), Value::Boolean(true));\n replacement_table.insert(String::from(\"c\"), Value::Boolean(false));\n let replacement = Value::Table(replacement_table);\n\n let merged = merge(base, replacement);\n\n let mut expected_table = Table::new();\n expected_table.insert(String::from(\"b\"), Value::Boolean(false));\n expected_table.insert(String::from(\"a\"), Value::Boolean(true));\n expected_table.insert(String::from(\"c\"), Value::Boolean(false));\n let expected = Value::Table(expected_table);\n\n assert_eq!(merged, expected);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/serde_utils.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 0, "content": "//! 
Serde helpers.\n\nuse toml::{Table, Value};\n\n/// Merge two serde structures.\n///\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n/// `replacement`.\npub fn merge(base: Value, replacement: Value) -> Value {\n match (base, replacement) {\n (Value::Array(mut base), Value::Array(mut replacement)) => {\n base.append(&mut replacement);\n Value::Array(base)\n },\n (Value::Table(base), Value::Table(replacement)) => {\n Value::Table(merge_tables(base, replacement))\n },\n (_, value) => value,\n }\n}\n\n", "meta": {"hash_id": "9cb0b4c58c737ddcd5d0874aae6e15b612af2c9a81a345444bb7e5e7b3bdffd1"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 1, "content": "/// Merge two key/value tables.\nfn merge_tables(mut base: Table, replacement: Table) -> Table {\n for (key, value) in replacement {\n let value = match base.remove(&key) {\n Some(base_value) => merge(base_value, value),\n None => value,\n };\n base.insert(key, value);\n }\n\n base\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn merge_primitive() {\n let base = Value::Table(Table::new());\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n", "meta": {"hash_id": "d0671e8460f24b5da18ed16087fdc57d3d521b4d37a214a1b8288229cbef5fc2"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 2, "content": " let base = Value::Boolean(false);\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Integer(0.into());\n let replacement = Value::Integer(1.into());\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::String(String::new());\n let replacement = Value::String(String::from(\"test\"));\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Table(Table::new());\n let replacement = Value::Table(Table::new());\n assert_eq!(merge(base.clone(), replacement), base);\n }\n\n", "meta": {"hash_id": "dc50d2f2a6e61d872c70d1cb4e0ca533416bad0a0df4a06c0dc7dc6c0281db4d"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 3, "content": " #[test]\n fn merge_sequence() {\n let base = Value::Array(vec![Value::Table(Table::new())]);\n let replacement = Value::Array(vec![Value::Boolean(true)]);\n let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);\n assert_eq!(merge(base, replacement), expected);\n }\n\n #[test]\n fn merge_tables() {\n let mut base_table = Table::new();\n base_table.insert(String::from(\"a\"), Value::Boolean(true));\n base_table.insert(String::from(\"b\"), Value::Boolean(false));\n let base = Value::Table(base_table);\n\n let mut replacement_table = Table::new();\n replacement_table.insert(String::from(\"a\"), Value::Boolean(true));\n replacement_table.insert(String::from(\"c\"), Value::Boolean(false));\n let replacement = Value::Table(replacement_table);\n\n let merged = merge(base, replacement);\n\n", "meta": {"hash_id": "93d38e4103052b4024bfe0485a7d66397306cd8c0119e7c06c90c1c9a75cbfc0"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 4, "content": " let mut expected_table = Table::new();\n expected_table.insert(String::from(\"b\"), Value::Boolean(false));\n expected_table.insert(String::from(\"a\"), Value::Boolean(true));\n expected_table.insert(String::from(\"c\"), Value::Boolean(false));\n let expected = 
Value::Table(expected_table);\n\n assert_eq!(merged, expected);\n }\n}\n", "meta": {"hash_id": "0ff3b8d5210329fd0940a72896d473e7361e43d9e352088b9536a570ff71050f"}}]}], "golden_chunks": [{"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 0, "content": "//! Serde helpers.\n\nuse toml::{Table, Value};\n\n/// Merge two serde structures.\n///\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n/// `replacement`.\npub fn merge(base: Value, replacement: Value) -> Value {\n match (base, replacement) {\n (Value::Array(mut base), Value::Array(mut replacement)) => {\n base.append(&mut replacement);\n Value::Array(base)\n },\n (Value::Table(base), Value::Table(replacement)) => {\n Value::Table(merge_tables(base, replacement))\n },\n (_, value) => value,\n }\n}\n\n", "meta": {"hash_id": "9cb0b4c58c737ddcd5d0874aae6e15b612af2c9a81a345444bb7e5e7b3bdffd1"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/serde_utils.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "How does the `merge_tables` function work?", "answer": "The `merge_tables` function takes two `Table` instances (`base` and `replacement`) and merges them together. It iterates over each key-value pair in the `replacement` table. For each pair, it checks if the key exists in the `base` table using `base.remove(&key)`. If the key exists, it recursively calls `merge` on the corresponding values from `base` and `replacement`. If the key doesn't exist in `base`, it simply uses the value from `replacement`. Finally, it inserts the merged or replacement value into the `base` table with the current key. This process merges the tables, giving precedence to values from the `replacement` table. The modified `base` table is then returned.", "golden_doc_uuids": ["63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc"], "golden_chunk_uuids": [["63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", 1]], "golden_documents": [{"uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "content": "//! 
Serde helpers.\n\nuse toml::{Table, Value};\n\n/// Merge two serde structures.\n///\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n/// `replacement`.\npub fn merge(base: Value, replacement: Value) -> Value {\n match (base, replacement) {\n (Value::Array(mut base), Value::Array(mut replacement)) => {\n base.append(&mut replacement);\n Value::Array(base)\n },\n (Value::Table(base), Value::Table(replacement)) => {\n Value::Table(merge_tables(base, replacement))\n },\n (_, value) => value,\n }\n}\n\n/// Merge two key/value tables.\nfn merge_tables(mut base: Table, replacement: Table) -> Table {\n for (key, value) in replacement {\n let value = match base.remove(&key) {\n Some(base_value) => merge(base_value, value),\n None => value,\n };\n base.insert(key, value);\n }\n\n base\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn merge_primitive() {\n let base = Value::Table(Table::new());\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Boolean(false);\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Integer(0.into());\n let replacement = Value::Integer(1.into());\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::String(String::new());\n let replacement = Value::String(String::from(\"test\"));\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Table(Table::new());\n let replacement = Value::Table(Table::new());\n assert_eq!(merge(base.clone(), replacement), base);\n }\n\n #[test]\n fn merge_sequence() {\n let base = Value::Array(vec![Value::Table(Table::new())]);\n let replacement = Value::Array(vec![Value::Boolean(true)]);\n let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);\n assert_eq!(merge(base, replacement), expected);\n }\n\n #[test]\n fn merge_tables() {\n let mut base_table = Table::new();\n base_table.insert(String::from(\"a\"), Value::Boolean(true));\n base_table.insert(String::from(\"b\"), Value::Boolean(false));\n let base = Value::Table(base_table);\n\n let mut replacement_table = Table::new();\n replacement_table.insert(String::from(\"a\"), Value::Boolean(true));\n replacement_table.insert(String::from(\"c\"), Value::Boolean(false));\n let replacement = Value::Table(replacement_table);\n\n let merged = merge(base, replacement);\n\n let mut expected_table = Table::new();\n expected_table.insert(String::from(\"b\"), Value::Boolean(false));\n expected_table.insert(String::from(\"a\"), Value::Boolean(true));\n expected_table.insert(String::from(\"c\"), Value::Boolean(false));\n let expected = Value::Table(expected_table);\n\n assert_eq!(merged, expected);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/serde_utils.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 0, "content": "//! 
Serde helpers.\n\nuse toml::{Table, Value};\n\n/// Merge two serde structures.\n///\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n/// `replacement`.\npub fn merge(base: Value, replacement: Value) -> Value {\n match (base, replacement) {\n (Value::Array(mut base), Value::Array(mut replacement)) => {\n base.append(&mut replacement);\n Value::Array(base)\n },\n (Value::Table(base), Value::Table(replacement)) => {\n Value::Table(merge_tables(base, replacement))\n },\n (_, value) => value,\n }\n}\n\n", "meta": {"hash_id": "9cb0b4c58c737ddcd5d0874aae6e15b612af2c9a81a345444bb7e5e7b3bdffd1"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 1, "content": "/// Merge two key/value tables.\nfn merge_tables(mut base: Table, replacement: Table) -> Table {\n for (key, value) in replacement {\n let value = match base.remove(&key) {\n Some(base_value) => merge(base_value, value),\n None => value,\n };\n base.insert(key, value);\n }\n\n base\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn merge_primitive() {\n let base = Value::Table(Table::new());\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n", "meta": {"hash_id": "d0671e8460f24b5da18ed16087fdc57d3d521b4d37a214a1b8288229cbef5fc2"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 2, "content": " let base = Value::Boolean(false);\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Integer(0.into());\n let replacement = Value::Integer(1.into());\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::String(String::new());\n let replacement = Value::String(String::from(\"test\"));\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Table(Table::new());\n let replacement = Value::Table(Table::new());\n assert_eq!(merge(base.clone(), replacement), base);\n }\n\n", "meta": {"hash_id": "dc50d2f2a6e61d872c70d1cb4e0ca533416bad0a0df4a06c0dc7dc6c0281db4d"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 3, "content": " #[test]\n fn merge_sequence() {\n let base = Value::Array(vec![Value::Table(Table::new())]);\n let replacement = Value::Array(vec![Value::Boolean(true)]);\n let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);\n assert_eq!(merge(base, replacement), expected);\n }\n\n #[test]\n fn merge_tables() {\n let mut base_table = Table::new();\n base_table.insert(String::from(\"a\"), Value::Boolean(true));\n base_table.insert(String::from(\"b\"), Value::Boolean(false));\n let base = Value::Table(base_table);\n\n let mut replacement_table = Table::new();\n replacement_table.insert(String::from(\"a\"), Value::Boolean(true));\n replacement_table.insert(String::from(\"c\"), Value::Boolean(false));\n let replacement = Value::Table(replacement_table);\n\n let merged = merge(base, replacement);\n\n", "meta": {"hash_id": "93d38e4103052b4024bfe0485a7d66397306cd8c0119e7c06c90c1c9a75cbfc0"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 4, "content": " let mut expected_table = Table::new();\n expected_table.insert(String::from(\"b\"), Value::Boolean(false));\n expected_table.insert(String::from(\"a\"), Value::Boolean(true));\n expected_table.insert(String::from(\"c\"), Value::Boolean(false));\n let expected = 
Value::Table(expected_table);\n\n assert_eq!(merged, expected);\n }\n}\n", "meta": {"hash_id": "0ff3b8d5210329fd0940a72896d473e7361e43d9e352088b9536a570ff71050f"}}]}], "golden_chunks": [{"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 1, "content": "/// Merge two key/value tables.\nfn merge_tables(mut base: Table, replacement: Table) -> Table {\n for (key, value) in replacement {\n let value = match base.remove(&key) {\n Some(base_value) => merge(base_value, value),\n None => value,\n };\n base.insert(key, value);\n }\n\n base\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn merge_primitive() {\n let base = Value::Table(Table::new());\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n", "meta": {"hash_id": "d0671e8460f24b5da18ed16087fdc57d3d521b4d37a214a1b8288229cbef5fc2"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/serde_utils.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "What does the `merge_sequence` test verify?", "answer": "The `merge_sequence` test verifies the behavior of the `merge` function when merging sequences (arrays). It creates a `base` array containing a single empty `Table` and a `replacement` array containing a single `Boolean` value. It then calls `merge(base, replacement)` and expects the result to be an array containing both the empty `Table` from `base` and the `Boolean` value from `replacement`. This test ensures that when merging arrays, the elements from both arrays are combined into a single resulting array.", "golden_doc_uuids": ["63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc"], "golden_chunk_uuids": [["63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", 3]], "golden_documents": [{"uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "content": "//! 
Serde helpers.\n\nuse toml::{Table, Value};\n\n/// Merge two serde structures.\n///\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n/// `replacement`.\npub fn merge(base: Value, replacement: Value) -> Value {\n match (base, replacement) {\n (Value::Array(mut base), Value::Array(mut replacement)) => {\n base.append(&mut replacement);\n Value::Array(base)\n },\n (Value::Table(base), Value::Table(replacement)) => {\n Value::Table(merge_tables(base, replacement))\n },\n (_, value) => value,\n }\n}\n\n/// Merge two key/value tables.\nfn merge_tables(mut base: Table, replacement: Table) -> Table {\n for (key, value) in replacement {\n let value = match base.remove(&key) {\n Some(base_value) => merge(base_value, value),\n None => value,\n };\n base.insert(key, value);\n }\n\n base\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn merge_primitive() {\n let base = Value::Table(Table::new());\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Boolean(false);\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Integer(0.into());\n let replacement = Value::Integer(1.into());\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::String(String::new());\n let replacement = Value::String(String::from(\"test\"));\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Table(Table::new());\n let replacement = Value::Table(Table::new());\n assert_eq!(merge(base.clone(), replacement), base);\n }\n\n #[test]\n fn merge_sequence() {\n let base = Value::Array(vec![Value::Table(Table::new())]);\n let replacement = Value::Array(vec![Value::Boolean(true)]);\n let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);\n assert_eq!(merge(base, replacement), expected);\n }\n\n #[test]\n fn merge_tables() {\n let mut base_table = Table::new();\n base_table.insert(String::from(\"a\"), Value::Boolean(true));\n base_table.insert(String::from(\"b\"), Value::Boolean(false));\n let base = Value::Table(base_table);\n\n let mut replacement_table = Table::new();\n replacement_table.insert(String::from(\"a\"), Value::Boolean(true));\n replacement_table.insert(String::from(\"c\"), Value::Boolean(false));\n let replacement = Value::Table(replacement_table);\n\n let merged = merge(base, replacement);\n\n let mut expected_table = Table::new();\n expected_table.insert(String::from(\"b\"), Value::Boolean(false));\n expected_table.insert(String::from(\"a\"), Value::Boolean(true));\n expected_table.insert(String::from(\"c\"), Value::Boolean(false));\n let expected = Value::Table(expected_table);\n\n assert_eq!(merged, expected);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/serde_utils.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 0, "content": "//! 
Serde helpers.\n\nuse toml::{Table, Value};\n\n/// Merge two serde structures.\n///\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n/// `replacement`.\npub fn merge(base: Value, replacement: Value) -> Value {\n match (base, replacement) {\n (Value::Array(mut base), Value::Array(mut replacement)) => {\n base.append(&mut replacement);\n Value::Array(base)\n },\n (Value::Table(base), Value::Table(replacement)) => {\n Value::Table(merge_tables(base, replacement))\n },\n (_, value) => value,\n }\n}\n\n", "meta": {"hash_id": "9cb0b4c58c737ddcd5d0874aae6e15b612af2c9a81a345444bb7e5e7b3bdffd1"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 1, "content": "/// Merge two key/value tables.\nfn merge_tables(mut base: Table, replacement: Table) -> Table {\n for (key, value) in replacement {\n let value = match base.remove(&key) {\n Some(base_value) => merge(base_value, value),\n None => value,\n };\n base.insert(key, value);\n }\n\n base\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn merge_primitive() {\n let base = Value::Table(Table::new());\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n", "meta": {"hash_id": "d0671e8460f24b5da18ed16087fdc57d3d521b4d37a214a1b8288229cbef5fc2"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 2, "content": " let base = Value::Boolean(false);\n let replacement = Value::Boolean(true);\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Integer(0.into());\n let replacement = Value::Integer(1.into());\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::String(String::new());\n let replacement = Value::String(String::from(\"test\"));\n assert_eq!(merge(base, replacement.clone()), replacement);\n\n let base = Value::Table(Table::new());\n let replacement = Value::Table(Table::new());\n assert_eq!(merge(base.clone(), replacement), base);\n }\n\n", "meta": {"hash_id": "dc50d2f2a6e61d872c70d1cb4e0ca533416bad0a0df4a06c0dc7dc6c0281db4d"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 3, "content": " #[test]\n fn merge_sequence() {\n let base = Value::Array(vec![Value::Table(Table::new())]);\n let replacement = Value::Array(vec![Value::Boolean(true)]);\n let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);\n assert_eq!(merge(base, replacement), expected);\n }\n\n #[test]\n fn merge_tables() {\n let mut base_table = Table::new();\n base_table.insert(String::from(\"a\"), Value::Boolean(true));\n base_table.insert(String::from(\"b\"), Value::Boolean(false));\n let base = Value::Table(base_table);\n\n let mut replacement_table = Table::new();\n replacement_table.insert(String::from(\"a\"), Value::Boolean(true));\n replacement_table.insert(String::from(\"c\"), Value::Boolean(false));\n let replacement = Value::Table(replacement_table);\n\n let merged = merge(base, replacement);\n\n", "meta": {"hash_id": "93d38e4103052b4024bfe0485a7d66397306cd8c0119e7c06c90c1c9a75cbfc0"}}, {"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 4, "content": " let mut expected_table = Table::new();\n expected_table.insert(String::from(\"b\"), Value::Boolean(false));\n expected_table.insert(String::from(\"a\"), Value::Boolean(true));\n expected_table.insert(String::from(\"c\"), Value::Boolean(false));\n let expected = 
Value::Table(expected_table);\n\n assert_eq!(merged, expected);\n }\n}\n", "meta": {"hash_id": "0ff3b8d5210329fd0940a72896d473e7361e43d9e352088b9536a570ff71050f"}}]}], "golden_chunks": [{"doc_uuid": "63734181335dc6b208cfc5728b9413f7967c140adcf4332d61a1a16ae8df9fbc", "index": 3, "content": " #[test]\n fn merge_sequence() {\n let base = Value::Array(vec![Value::Table(Table::new())]);\n let replacement = Value::Array(vec![Value::Boolean(true)]);\n let expected = Value::Array(vec![Value::Table(Table::new()), Value::Boolean(true)]);\n assert_eq!(merge(base, replacement), expected);\n }\n\n #[test]\n fn merge_tables() {\n let mut base_table = Table::new();\n base_table.insert(String::from(\"a\"), Value::Boolean(true));\n base_table.insert(String::from(\"b\"), Value::Boolean(false));\n let base = Value::Table(base_table);\n\n let mut replacement_table = Table::new();\n replacement_table.insert(String::from(\"a\"), Value::Boolean(true));\n replacement_table.insert(String::from(\"c\"), Value::Boolean(false));\n let replacement = Value::Table(replacement_table);\n\n let merged = merge(base, replacement);\n\n", "meta": {"hash_id": "93d38e4103052b4024bfe0485a7d66397306cd8c0119e7c06c90c1c9a75cbfc0"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/serde_utils.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "How does the `get_pw_entry` function work?", "answer": "The `get_pw_entry` function retrieves the password entry for the current user. It takes a mutable buffer `buf` as input. The function first creates a zeroed `passwd` struct using `MaybeUninit`. It then calls the `getpwuid_r` function, passing the current user's UID, the password entry struct, the buffer, and the buffer length. If the call is successful and the password entry is found, the function returns a `Passwd` struct containing borrowed references to the username, home directory, and shell from the password entry. If the call fails or the password entry is not found, an error is returned.", "golden_doc_uuids": ["28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8"], "golden_chunk_uuids": [["28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", 2]], "golden_documents": [{"uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "content": "//! TTY related functionality.\n\nuse std::ffi::CStr;\nuse std::fs::File;\nuse std::io::{Error, ErrorKind, Read, Result};\nuse std::mem::MaybeUninit;\nuse std::os::unix::io::{AsRawFd, FromRawFd};\nuse std::os::unix::net::UnixStream;\nuse std::os::unix::process::CommandExt;\nuse std::process::{Child, Command, Stdio};\nuse std::sync::Arc;\nuse std::{env, ptr};\n\nuse libc::{c_int, TIOCSCTTY};\nuse log::error;\nuse polling::{Event, PollMode, Poller};\nuse rustix_openpty::openpty;\nuse rustix_openpty::rustix::termios::Winsize;\n#[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\nuse rustix_openpty::rustix::termios::{self, InputModes, OptionalActions};\nuse signal_hook::consts as sigconsts;\nuse signal_hook::low_level::pipe as signal_pipe;\n\nuse crate::event::{OnResize, WindowSize};\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options};\n\n// Interest in PTY read/writes.\npub(crate) const PTY_READ_WRITE_TOKEN: usize = 0;\n\n// Interest in new child events.\npub(crate) const PTY_CHILD_EVENT_TOKEN: usize = 1;\n\nmacro_rules! 
die {\n ($($arg:tt)*) => {{\n error!($($arg)*);\n std::process::exit(1);\n }}\n}\n\n/// Really only needed on BSD, but should be fine elsewhere.\nfn set_controlling_terminal(fd: c_int) {\n let res = unsafe {\n // TIOSCTTY changes based on platform and the `ioctl` call is different\n // based on architecture (32/64). So a generic cast is used to make sure\n // there are no issues. To allow such a generic cast the clippy warning\n // is disabled.\n #[allow(clippy::cast_lossless)]\n libc::ioctl(fd, TIOCSCTTY as _, 0)\n };\n\n if res < 0 {\n die!(\"ioctl TIOCSCTTY failed: {}\", Error::last_os_error());\n }\n}\n\n#[derive(Debug)]\nstruct Passwd<'a> {\n name: &'a str,\n dir: &'a str,\n shell: &'a str,\n}\n\n/// Return a Passwd struct with pointers into the provided buf.\n///\n/// # Unsafety\n///\n/// If `buf` is changed while `Passwd` is alive, bad thing will almost certainly happen.\nfn get_pw_entry(buf: &mut [i8; 1024]) -> Result> {\n // Create zeroed passwd struct.\n let mut entry: MaybeUninit = MaybeUninit::uninit();\n\n let mut res: *mut libc::passwd = ptr::null_mut();\n\n // Try and read the pw file.\n let uid = unsafe { libc::getuid() };\n let status = unsafe {\n libc::getpwuid_r(uid, entry.as_mut_ptr(), buf.as_mut_ptr() as *mut _, buf.len(), &mut res)\n };\n let entry = unsafe { entry.assume_init() };\n\n if status < 0 {\n return Err(Error::new(ErrorKind::Other, \"getpwuid_r failed\"));\n }\n\n if res.is_null() {\n return Err(Error::new(ErrorKind::Other, \"pw not found\"));\n }\n\n // Sanity check.\n assert_eq!(entry.pw_uid, uid);\n\n // Build a borrowed Passwd struct.\n Ok(Passwd {\n name: unsafe { CStr::from_ptr(entry.pw_name).to_str().unwrap() },\n dir: unsafe { CStr::from_ptr(entry.pw_dir).to_str().unwrap() },\n shell: unsafe { CStr::from_ptr(entry.pw_shell).to_str().unwrap() },\n })\n}\n\npub struct Pty {\n child: Child,\n file: File,\n signals: UnixStream,\n}\n\nimpl Pty {\n pub fn child(&self) -> &Child {\n &self.child\n }\n\n pub fn file(&self) -> &File {\n &self.file\n }\n}\n\n/// User information that is required for a new shell session.\nstruct ShellUser {\n user: String,\n home: String,\n shell: String,\n}\n\nimpl ShellUser {\n /// look for shell, username, longname, and home dir in the respective environment variables\n /// before falling back on looking in to `passwd`.\n fn from_env() -> Result {\n let mut buf = [0; 1024];\n let pw = get_pw_entry(&mut buf);\n\n let user = match env::var(\"USER\") {\n Ok(user) => user,\n Err(_) => match pw {\n Ok(ref pw) => pw.name.to_owned(),\n Err(err) => return Err(err),\n },\n };\n\n let home = match env::var(\"HOME\") {\n Ok(home) => home,\n Err(_) => match pw {\n Ok(ref pw) => pw.dir.to_owned(),\n Err(err) => return Err(err),\n },\n };\n\n let shell = match env::var(\"SHELL\") {\n Ok(shell) => shell,\n Err(_) => match pw {\n Ok(ref pw) => pw.shell.to_owned(),\n Err(err) => return Err(err),\n },\n };\n\n Ok(Self { user, home, shell })\n }\n}\n\n#[cfg(not(target_os = \"macos\"))]\nfn default_shell_command(shell: &str, _user: &str) -> Command {\n Command::new(shell)\n}\n\n#[cfg(target_os = \"macos\")]\nfn default_shell_command(shell: &str, user: &str) -> Command {\n let shell_name = shell.rsplit('/').next().unwrap();\n\n // On macOS, use the `login` command so the shell will appear as a tty session.\n let mut login_command = Command::new(\"/usr/bin/login\");\n\n // Exec the shell with argv[0] prepended by '-' so it becomes a login shell.\n // `login` normally does this itself, but `-l` disables this.\n let exec = format!(\"exec -a -{} 
{}\", shell_name, shell);\n\n // -f: Bypasses authentication for the already-logged-in user.\n // -l: Skips changing directory to $HOME and prepending '-' to argv[0].\n // -p: Preserves the environment.\n //\n // XXX: we use zsh here over sh due to `exec -a`.\n login_command.args([\"-flp\", user, \"/bin/zsh\", \"-c\", &exec]);\n login_command\n}\n\n/// Create a new TTY and return a handle to interact with it.\npub fn new(config: &Options, window_size: WindowSize, window_id: u64) -> Result {\n let pty = openpty(None, Some(&window_size.to_winsize()))?;\n let (master, slave) = (pty.controller, pty.user);\n let master_fd = master.as_raw_fd();\n let slave_fd = slave.as_raw_fd();\n\n #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n if let Ok(mut termios) = termios::tcgetattr(&master) {\n // Set character encoding to UTF-8.\n termios.input_modes.set(InputModes::IUTF8, true);\n let _ = termios::tcsetattr(&master, OptionalActions::Now, &termios);\n }\n\n let user = ShellUser::from_env()?;\n\n let mut builder = if let Some(shell) = config.shell.as_ref() {\n let mut cmd = Command::new(&shell.program);\n cmd.args(shell.args.as_slice());\n cmd\n } else {\n default_shell_command(&user.shell, &user.user)\n };\n\n // Setup child stdin/stdout/stderr as slave fd of PTY.\n // Ownership of fd is transferred to the Stdio structs and will be closed by them at the end of\n // this scope. (It is not an issue that the fd is closed three times since File::drop ignores\n // error on libc::close.).\n builder.stdin(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stderr(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stdout(unsafe { Stdio::from_raw_fd(slave_fd) });\n\n // Setup shell environment.\n let window_id = window_id.to_string();\n builder.env(\"ALACRITTY_WINDOW_ID\", &window_id);\n builder.env(\"USER\", user.user);\n builder.env(\"HOME\", user.home);\n // Set Window ID for clients relying on X11 hacks.\n builder.env(\"WINDOWID\", window_id);\n for (key, value) in &config.env {\n builder.env(key, value);\n }\n\n unsafe {\n builder.pre_exec(move || {\n // Create a new process group.\n let err = libc::setsid();\n if err == -1 {\n return Err(Error::new(ErrorKind::Other, \"Failed to set session id\"));\n }\n\n set_controlling_terminal(slave_fd);\n\n // No longer need slave/master fds.\n libc::close(slave_fd);\n libc::close(master_fd);\n\n libc::signal(libc::SIGCHLD, libc::SIG_DFL);\n libc::signal(libc::SIGHUP, libc::SIG_DFL);\n libc::signal(libc::SIGINT, libc::SIG_DFL);\n libc::signal(libc::SIGQUIT, libc::SIG_DFL);\n libc::signal(libc::SIGTERM, libc::SIG_DFL);\n libc::signal(libc::SIGALRM, libc::SIG_DFL);\n\n Ok(())\n });\n }\n\n // Handle set working directory option.\n if let Some(dir) = &config.working_directory {\n builder.current_dir(dir);\n }\n\n // Prepare signal handling before spawning child.\n let signals = {\n let (sender, recv) = UnixStream::pair()?;\n\n // Register the recv end of the pipe for SIGCHLD.\n signal_pipe::register(sigconsts::SIGCHLD, sender)?;\n recv.set_nonblocking(true)?;\n recv\n };\n\n match builder.spawn() {\n Ok(child) => {\n unsafe {\n // Maybe this should be done outside of this function so nonblocking\n // isn't forced upon consumers. 
Although maybe it should be?\n set_nonblocking(master_fd);\n }\n\n Ok(Pty { child, file: File::from(master), signals })\n },\n Err(err) => Err(Error::new(\n err.kind(),\n format!(\n \"Failed to spawn command '{}': {}\",\n builder.get_program().to_string_lossy(),\n err\n ),\n )),\n }\n}\n\nimpl Drop for Pty {\n fn drop(&mut self) {\n // Make sure the PTY is terminated properly.\n unsafe {\n libc::kill(self.child.id() as i32, libc::SIGHUP);\n }\n let _ = self.child.wait();\n }\n}\n\nimpl EventedReadWrite for Pty {\n type Reader = File;\n type Writer = File;\n\n #[inline]\n unsafe fn register(\n &mut self,\n poll: &Arc,\n mut interest: Event,\n poll_opts: PollMode,\n ) -> Result<()> {\n interest.key = PTY_READ_WRITE_TOKEN;\n unsafe {\n poll.add_with_mode(&self.file, interest, poll_opts)?;\n }\n\n unsafe {\n poll.add_with_mode(\n &self.signals,\n Event::readable(PTY_CHILD_EVENT_TOKEN),\n PollMode::Level,\n )\n }\n }\n\n #[inline]\n fn reregister(\n &mut self,\n poll: &Arc,\n mut interest: Event,\n poll_opts: PollMode,\n ) -> Result<()> {\n interest.key = PTY_READ_WRITE_TOKEN;\n poll.modify_with_mode(&self.file, interest, poll_opts)?;\n\n poll.modify_with_mode(\n &self.signals,\n Event::readable(PTY_CHILD_EVENT_TOKEN),\n PollMode::Level,\n )\n }\n\n #[inline]\n fn deregister(&mut self, poll: &Arc) -> Result<()> {\n poll.delete(&self.file)?;\n poll.delete(&self.signals)\n }\n\n #[inline]\n fn reader(&mut self) -> &mut File {\n &mut self.file\n }\n\n #[inline]\n fn writer(&mut self) -> &mut File {\n &mut self.file\n }\n}\n\nimpl EventedPty for Pty {\n #[inline]\n fn next_child_event(&mut self) -> Option {\n // See if there has been a SIGCHLD.\n let mut buf = [0u8; 1];\n if let Err(err) = self.signals.read(&mut buf) {\n if err.kind() != ErrorKind::WouldBlock {\n error!(\"Error reading from signal pipe: {}\", err);\n }\n return None;\n }\n\n // Match on the child process.\n match self.child.try_wait() {\n Err(err) => {\n error!(\"Error checking child process termination: {}\", err);\n None\n },\n Ok(None) => None,\n Ok(exit_status) => Some(ChildEvent::Exited(exit_status.and_then(|s| s.code()))),\n }\n }\n}\n\nimpl OnResize for Pty {\n /// Resize the PTY.\n ///\n /// Tells the kernel that the window size changed with the new pixel\n /// dimensions and line/column counts.\n fn on_resize(&mut self, window_size: WindowSize) {\n let win = window_size.to_winsize();\n\n let res = unsafe { libc::ioctl(self.file.as_raw_fd(), libc::TIOCSWINSZ, &win as *const _) };\n\n if res < 0 {\n die!(\"ioctl TIOCSWINSZ failed: {}\", Error::last_os_error());\n }\n }\n}\n\n/// Types that can produce a `Winsize`.\npub trait ToWinsize {\n /// Get a `Winsize`.\n fn to_winsize(self) -> Winsize;\n}\n\nimpl ToWinsize for WindowSize {\n fn to_winsize(self) -> Winsize {\n let ws_row = self.num_lines as libc::c_ushort;\n let ws_col = self.num_cols as libc::c_ushort;\n\n let ws_xpixel = ws_col * self.cell_width as libc::c_ushort;\n let ws_ypixel = ws_row * self.cell_height as libc::c_ushort;\n Winsize { ws_row, ws_col, ws_xpixel, ws_ypixel }\n }\n}\n\nunsafe fn set_nonblocking(fd: c_int) {\n use libc::{fcntl, F_GETFL, F_SETFL, O_NONBLOCK};\n\n let res = fcntl(fd, F_SETFL, fcntl(fd, F_GETFL, 0) | O_NONBLOCK);\n assert_eq!(res, 0);\n}\n\n#[test]\nfn test_get_pw_entry() {\n let mut buf: [i8; 1024] = [0; 1024];\n let _pw = get_pw_entry(&mut buf).unwrap();\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/tty/unix.rs", "repo_name": "alacritty/alacritty", "num_chunks": 19, "source": "codebases/jsonl"}, 
"chunks": [{"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 0, "content": "//! TTY related functionality.\n\nuse std::ffi::CStr;\nuse std::fs::File;\nuse std::io::{Error, ErrorKind, Read, Result};\nuse std::mem::MaybeUninit;\nuse std::os::unix::io::{AsRawFd, FromRawFd};\nuse std::os::unix::net::UnixStream;\nuse std::os::unix::process::CommandExt;\nuse std::process::{Child, Command, Stdio};\nuse std::sync::Arc;\nuse std::{env, ptr};\n\nuse libc::{c_int, TIOCSCTTY};\nuse log::error;\nuse polling::{Event, PollMode, Poller};\nuse rustix_openpty::openpty;\nuse rustix_openpty::rustix::termios::Winsize;\n#[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\nuse rustix_openpty::rustix::termios::{self, InputModes, OptionalActions};\nuse signal_hook::consts as sigconsts;\nuse signal_hook::low_level::pipe as signal_pipe;\n\n", "meta": {"hash_id": "34d3629c6aed880626d26574809be3df8516d6a7ad2ffda7c8efb091b28c3d06"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 1, "content": "use crate::event::{OnResize, WindowSize};\nuse crate::tty::{ChildEvent, EventedPty, EventedReadWrite, Options};\n\n// Interest in PTY read/writes.\npub(crate) const PTY_READ_WRITE_TOKEN: usize = 0;\n\n// Interest in new child events.\npub(crate) const PTY_CHILD_EVENT_TOKEN: usize = 1;\n\nmacro_rules! die {\n ($($arg:tt)*) => {{\n error!($($arg)*);\n std::process::exit(1);\n }}\n}\n\n/// Really only needed on BSD, but should be fine elsewhere.\nfn set_controlling_terminal(fd: c_int) {\n let res = unsafe {\n // TIOSCTTY changes based on platform and the `ioctl` call is different\n // based on architecture (32/64). So a generic cast is used to make sure\n // there are no issues. To allow such a generic cast the clippy warning\n // is disabled.\n #[allow(clippy::cast_lossless)]\n libc::ioctl(fd, TIOCSCTTY as _, 0)\n };\n\n", "meta": {"hash_id": "638242137e73a608d0c35ae5a95a2087f5c084fb90261713718cebf13d670897"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 2, "content": " if res < 0 {\n die!(\"ioctl TIOCSCTTY failed: {}\", Error::last_os_error());\n }\n}\n\n#[derive(Debug)]\nstruct Passwd<'a> {\n name: &'a str,\n dir: &'a str,\n shell: &'a str,\n}\n\n/// Return a Passwd struct with pointers into the provided buf.\n///\n/// # Unsafety\n///\n/// If `buf` is changed while `Passwd` is alive, bad thing will almost certainly happen.\nfn get_pw_entry(buf: &mut [i8; 1024]) -> Result> {\n // Create zeroed passwd struct.\n let mut entry: MaybeUninit = MaybeUninit::uninit();\n\n let mut res: *mut libc::passwd = ptr::null_mut();\n\n // Try and read the pw file.\n let uid = unsafe { libc::getuid() };\n let status = unsafe {\n libc::getpwuid_r(uid, entry.as_mut_ptr(), buf.as_mut_ptr() as *mut _, buf.len(), &mut res)\n };\n let entry = unsafe { entry.assume_init() };\n\n", "meta": {"hash_id": "018ec82da1d45a7a4786c25a34bce33e31434125fff1f1daf48ff90c70e8ea16"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 3, "content": " if status < 0 {\n return Err(Error::new(ErrorKind::Other, \"getpwuid_r failed\"));\n }\n\n if res.is_null() {\n return Err(Error::new(ErrorKind::Other, \"pw not found\"));\n }\n\n // Sanity check.\n assert_eq!(entry.pw_uid, uid);\n\n // Build a borrowed Passwd struct.\n Ok(Passwd {\n name: unsafe { CStr::from_ptr(entry.pw_name).to_str().unwrap() },\n dir: unsafe { CStr::from_ptr(entry.pw_dir).to_str().unwrap() },\n shell: unsafe { 
CStr::from_ptr(entry.pw_shell).to_str().unwrap() },\n })\n}\n\n", "meta": {"hash_id": "938890e94ad6b222d6e5f98d96f8d979a105bd5faafdab800b757030a08f3958"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 4, "content": "pub struct Pty {\n child: Child,\n file: File,\n signals: UnixStream,\n}\n\nimpl Pty {\n pub fn child(&self) -> &Child {\n &self.child\n }\n\n pub fn file(&self) -> &File {\n &self.file\n }\n}\n\n/// User information that is required for a new shell session.\nstruct ShellUser {\n user: String,\n home: String,\n shell: String,\n}\n\nimpl ShellUser {\n /// look for shell, username, longname, and home dir in the respective environment variables\n /// before falling back on looking in to `passwd`.\n fn from_env() -> Result<Self> {\n let mut buf = [0; 1024];\n let pw = get_pw_entry(&mut buf);\n\n", "meta": {"hash_id": "06fe4bf0062e15b5b642279c5bf793fcdacdd16c18974add5d21e6783cd5518a"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 5, "content": " let user = match env::var(\"USER\") {\n Ok(user) => user,\n Err(_) => match pw {\n Ok(ref pw) => pw.name.to_owned(),\n Err(err) => return Err(err),\n },\n };\n\n let home = match env::var(\"HOME\") {\n Ok(home) => home,\n Err(_) => match pw {\n Ok(ref pw) => pw.dir.to_owned(),\n Err(err) => return Err(err),\n },\n };\n\n let shell = match env::var(\"SHELL\") {\n Ok(shell) => shell,\n Err(_) => match pw {\n Ok(ref pw) => pw.shell.to_owned(),\n Err(err) => return Err(err),\n },\n };\n\n Ok(Self { user, home, shell })\n }\n}\n\n#[cfg(not(target_os = \"macos\"))]\nfn default_shell_command(shell: &str, _user: &str) -> Command {\n Command::new(shell)\n}\n\n", "meta": {"hash_id": "edece948170839aa97f65ff94bf965d14eae3e704f1fb8621210b5528d29a02e"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 6, "content": "#[cfg(target_os = \"macos\")]\nfn default_shell_command(shell: &str, user: &str) -> Command {\n let shell_name = shell.rsplit('/').next().unwrap();\n\n // On macOS, use the `login` command so the shell will appear as a tty session.\n let mut login_command = Command::new(\"/usr/bin/login\");\n\n // Exec the shell with argv[0] prepended by '-' so it becomes a login shell.\n // `login` normally does this itself, but `-l` disables this.\n let exec = format!(\"exec -a -{} {}\", shell_name, shell);\n\n", "meta": {"hash_id": "dbe4fc8bf248c9a17f06791b3231540d480f973b6d44a915b89de31962ec9c4b"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 7, "content": " // -f: Bypasses authentication for the already-logged-in user.\n // -l: Skips changing directory to $HOME and prepending '-' to argv[0].\n // -p: Preserves the environment.\n //\n // XXX: we use zsh here over sh due to `exec -a`.\n login_command.args([\"-flp\", user, \"/bin/zsh\", \"-c\", &exec]);\n login_command\n}\n\n/// Create a new TTY and return a handle to interact with it.\npub fn new(config: &Options, window_size: WindowSize, window_id: u64) -> Result<Pty> {\n let pty = openpty(None, Some(&window_size.to_winsize()))?;\n let (master, slave) = (pty.controller, pty.user);\n let master_fd = master.as_raw_fd();\n let slave_fd = slave.as_raw_fd();\n\n", "meta": {"hash_id": "b34b91fb134816a2db9aa347f03359ac8e97f65a9ffb4be7d3a863af5a344918"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 8, "content": " #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n if let Ok(mut termios) = 
termios::tcgetattr(&master) {\n // Set character encoding to UTF-8.\n termios.input_modes.set(InputModes::IUTF8, true);\n let _ = termios::tcsetattr(&master, OptionalActions::Now, &termios);\n }\n\n let user = ShellUser::from_env()?;\n\n let mut builder = if let Some(shell) = config.shell.as_ref() {\n let mut cmd = Command::new(&shell.program);\n cmd.args(shell.args.as_slice());\n cmd\n } else {\n default_shell_command(&user.shell, &user.user)\n };\n\n", "meta": {"hash_id": "2865079b02a67c8bcdcfc80f63c32c10625226413a0d6c896360925309bd9b3c"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 9, "content": " // Setup child stdin/stdout/stderr as slave fd of PTY.\n // Ownership of fd is transferred to the Stdio structs and will be closed by them at the end of\n // this scope. (It is not an issue that the fd is closed three times since File::drop ignores\n // error on libc::close.).\n builder.stdin(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stderr(unsafe { Stdio::from_raw_fd(slave_fd) });\n builder.stdout(unsafe { Stdio::from_raw_fd(slave_fd) });\n\n", "meta": {"hash_id": "5c770b70039d175b66910c5dbf2891e2d65021b38f2fefd474d71438457a78af"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 10, "content": " // Setup shell environment.\n let window_id = window_id.to_string();\n builder.env(\"ALACRITTY_WINDOW_ID\", &window_id);\n builder.env(\"USER\", user.user);\n builder.env(\"HOME\", user.home);\n // Set Window ID for clients relying on X11 hacks.\n builder.env(\"WINDOWID\", window_id);\n for (key, value) in &config.env {\n builder.env(key, value);\n }\n\n unsafe {\n builder.pre_exec(move || {\n // Create a new process group.\n let err = libc::setsid();\n if err == -1 {\n return Err(Error::new(ErrorKind::Other, \"Failed to set session id\"));\n }\n\n set_controlling_terminal(slave_fd);\n\n // No longer need slave/master fds.\n libc::close(slave_fd);\n libc::close(master_fd);\n\n", "meta": {"hash_id": "83c952180e633ff07609bdf621c22ad3b0ef6a3a2cae074d34a2f0353bfdb770"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 11, "content": " libc::signal(libc::SIGCHLD, libc::SIG_DFL);\n libc::signal(libc::SIGHUP, libc::SIG_DFL);\n libc::signal(libc::SIGINT, libc::SIG_DFL);\n libc::signal(libc::SIGQUIT, libc::SIG_DFL);\n libc::signal(libc::SIGTERM, libc::SIG_DFL);\n libc::signal(libc::SIGALRM, libc::SIG_DFL);\n\n Ok(())\n });\n }\n\n // Handle set working directory option.\n if let Some(dir) = &config.working_directory {\n builder.current_dir(dir);\n }\n\n // Prepare signal handling before spawning child.\n let signals = {\n let (sender, recv) = UnixStream::pair()?;\n\n", "meta": {"hash_id": "17cecffc0bedfc95f4f1aa3dbde3bc5c13064c6d7f4d7d305995eeba33f21d17"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 12, "content": " // Register the recv end of the pipe for SIGCHLD.\n signal_pipe::register(sigconsts::SIGCHLD, sender)?;\n recv.set_nonblocking(true)?;\n recv\n };\n\n match builder.spawn() {\n Ok(child) => {\n unsafe {\n // Maybe this should be done outside of this function so nonblocking\n // isn't forced upon consumers. 
Although maybe it should be?\n set_nonblocking(master_fd);\n }\n\n Ok(Pty { child, file: File::from(master), signals })\n },\n Err(err) => Err(Error::new(\n err.kind(),\n format!(\n \"Failed to spawn command '{}': {}\",\n builder.get_program().to_string_lossy(),\n err\n ),\n )),\n }\n}\n\n", "meta": {"hash_id": "534c9481e47f3d018d490c2194c91d6149aab1ba257ec3a422e5b93a7fc514b8"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 13, "content": "impl Drop for Pty {\n fn drop(&mut self) {\n // Make sure the PTY is terminated properly.\n unsafe {\n libc::kill(self.child.id() as i32, libc::SIGHUP);\n }\n let _ = self.child.wait();\n }\n}\n\nimpl EventedReadWrite for Pty {\n type Reader = File;\n type Writer = File;\n\n #[inline]\n unsafe fn register(\n &mut self,\n poll: &Arc,\n mut interest: Event,\n poll_opts: PollMode,\n ) -> Result<()> {\n interest.key = PTY_READ_WRITE_TOKEN;\n unsafe {\n poll.add_with_mode(&self.file, interest, poll_opts)?;\n }\n\n", "meta": {"hash_id": "e5ed70d76e8d3170e16778e63457e0659775e1f01b3e268b67b09b96501d8d86"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 14, "content": " unsafe {\n poll.add_with_mode(\n &self.signals,\n Event::readable(PTY_CHILD_EVENT_TOKEN),\n PollMode::Level,\n )\n }\n }\n\n #[inline]\n fn reregister(\n &mut self,\n poll: &Arc,\n mut interest: Event,\n poll_opts: PollMode,\n ) -> Result<()> {\n interest.key = PTY_READ_WRITE_TOKEN;\n poll.modify_with_mode(&self.file, interest, poll_opts)?;\n\n", "meta": {"hash_id": "a632c03d7ed89160759dfe735c2894e44b387ee32a97f926d2966a7e3bea26c5"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 15, "content": " poll.modify_with_mode(\n &self.signals,\n Event::readable(PTY_CHILD_EVENT_TOKEN),\n PollMode::Level,\n )\n }\n\n #[inline]\n fn deregister(&mut self, poll: &Arc) -> Result<()> {\n poll.delete(&self.file)?;\n poll.delete(&self.signals)\n }\n\n #[inline]\n fn reader(&mut self) -> &mut File {\n &mut self.file\n }\n\n #[inline]\n fn writer(&mut self) -> &mut File {\n &mut self.file\n }\n}\n\n", "meta": {"hash_id": "b2d782d94c82414d212e34a001f5ac20116cecd54fb9815216c1914bb912a19e"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 16, "content": "impl EventedPty for Pty {\n #[inline]\n fn next_child_event(&mut self) -> Option {\n // See if there has been a SIGCHLD.\n let mut buf = [0u8; 1];\n if let Err(err) = self.signals.read(&mut buf) {\n if err.kind() != ErrorKind::WouldBlock {\n error!(\"Error reading from signal pipe: {}\", err);\n }\n return None;\n }\n\n // Match on the child process.\n match self.child.try_wait() {\n Err(err) => {\n error!(\"Error checking child process termination: {}\", err);\n None\n },\n Ok(None) => None,\n Ok(exit_status) => Some(ChildEvent::Exited(exit_status.and_then(|s| s.code()))),\n }\n }\n}\n\n", "meta": {"hash_id": "1b63432e467c3cbed1dce59026453d10650e0fd50af36c38612a3b5fa5224a7a"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 17, "content": "impl OnResize for Pty {\n /// Resize the PTY.\n ///\n /// Tells the kernel that the window size changed with the new pixel\n /// dimensions and line/column counts.\n fn on_resize(&mut self, window_size: WindowSize) {\n let win = window_size.to_winsize();\n\n let res = unsafe { libc::ioctl(self.file.as_raw_fd(), libc::TIOCSWINSZ, &win as *const _) };\n\n if res < 0 {\n die!(\"ioctl TIOCSWINSZ failed: {}\", 
Error::last_os_error());\n }\n }\n}\n\n", "meta": {"hash_id": "8746d004d40c29ec12c250fd6117f311d8a745c5ae20179e0cd8f93a742b6545"}}, {"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 18, "content": "/// Types that can produce a `Winsize`.\npub trait ToWinsize {\n /// Get a `Winsize`.\n fn to_winsize(self) -> Winsize;\n}\n\nimpl ToWinsize for WindowSize {\n fn to_winsize(self) -> Winsize {\n let ws_row = self.num_lines as libc::c_ushort;\n let ws_col = self.num_cols as libc::c_ushort;\n\n let ws_xpixel = ws_col * self.cell_width as libc::c_ushort;\n let ws_ypixel = ws_row * self.cell_height as libc::c_ushort;\n Winsize { ws_row, ws_col, ws_xpixel, ws_ypixel }\n }\n}\n\nunsafe fn set_nonblocking(fd: c_int) {\n use libc::{fcntl, F_GETFL, F_SETFL, O_NONBLOCK};\n\n let res = fcntl(fd, F_SETFL, fcntl(fd, F_GETFL, 0) | O_NONBLOCK);\n assert_eq!(res, 0);\n}\n\n#[test]\nfn test_get_pw_entry() {\n let mut buf: [i8; 1024] = [0; 1024];\n let _pw = get_pw_entry(&mut buf).unwrap();\n}\n", "meta": {"hash_id": "ee4bd1d4179729c55d5daa01440507c22eb7769a1c9b6ae8e872c46a2edae1dd"}}]}], "golden_chunks": [{"doc_uuid": "28bc35399cf4cb6c66ebbab6e965034ec7f505aa522f87ff8662409309224dc8", "index": 2, "content": " if res < 0 {\n die!(\"ioctl TIOCSCTTY failed: {}\", Error::last_os_error());\n }\n}\n\n#[derive(Debug)]\nstruct Passwd<'a> {\n name: &'a str,\n dir: &'a str,\n shell: &'a str,\n}\n\n/// Return a Passwd struct with pointers into the provided buf.\n///\n/// # Unsafety\n///\n/// If `buf` is changed while `Passwd` is alive, bad thing will almost certainly happen.\nfn get_pw_entry(buf: &mut [i8; 1024]) -> Result> {\n // Create zeroed passwd struct.\n let mut entry: MaybeUninit = MaybeUninit::uninit();\n\n let mut res: *mut libc::passwd = ptr::null_mut();\n\n // Try and read the pw file.\n let uid = unsafe { libc::getuid() };\n let status = unsafe {\n libc::getpwuid_r(uid, entry.as_mut_ptr(), buf.as_mut_ptr() as *mut _, buf.len(), &mut res)\n };\n let entry = unsafe { entry.assume_init() };\n\n", "meta": {"hash_id": "018ec82da1d45a7a4786c25a34bce33e31434125fff1f1daf48ff90c70e8ea16"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/tty/unix.rs", "repo_name": "alacritty/alacritty", "num_chunks": 19, "source": "codebases/jsonl"}} +{"query": "What fields does the `Mouse` struct contain?", "answer": "The `Mouse` struct contains two fields:\n- `hide_when_typing: bool`: A boolean indicating whether the mouse should be hidden when typing.\n- `bindings: MouseBindings`: An instance of the `MouseBindings` struct, which likely represents the mouse bindings configuration.", "golden_doc_uuids": ["e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7"], "golden_chunk_uuids": [["e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", 0]], "golden_documents": [{"uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn 
deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/mouse.rs", "repo_name": "alacritty/alacritty", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "index": 0, "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec<MouseBinding>);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"hash_id": "b07d71ee193a427e34f9c39fb90add91ea49b318a00657a90cd201a35c891dee"}}]}], "golden_chunks": [{"doc_uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "index": 0, "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec<MouseBinding>);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"hash_id": "b07d71ee193a427e34f9c39fb90add91ea49b318a00657a90cd201a35c891dee"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/mouse.rs", "repo_name": "alacritty/alacritty", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "How are the default mouse bindings obtained in the `default` implementation of `MouseBindings`?", "answer": "The default mouse bindings are obtained by calling the `bindings::default_mouse_bindings()` function, which is likely defined in the `bindings` module imported at the top of the file. 
The returned value is then used to initialize the `MouseBindings` struct in the `default` implementation:\n```rust\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n```", "golden_doc_uuids": ["e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7"], "golden_chunk_uuids": [["e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", 0]], "golden_documents": [{"uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize(deserializer: D) -> Result\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/mouse.rs", "repo_name": "alacritty/alacritty", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "index": 0, "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize(deserializer: D) -> Result\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"hash_id": "b07d71ee193a427e34f9c39fb90add91ea49b318a00657a90cd201a35c891dee"}}]}], "golden_chunks": [{"doc_uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "index": 0, "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize(deserializer: D) -> Result\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"hash_id": "b07d71ee193a427e34f9c39fb90add91ea49b318a00657a90cd201a35c891dee"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/mouse.rs", "repo_name": "alacritty/alacritty", "num_chunks": 
1, "source": "codebases/jsonl"}} +{"query": "How is deserialization handled for the `MouseBindings` struct?", "answer": "Deserialization for the `MouseBindings` struct is implemented using a custom `Deserialize` implementation. It uses the `ui_config::deserialize_bindings` function to deserialize the bindings, passing the deserializer and the default mouse bindings as arguments:\n```rust\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize(deserializer: D) -> Result\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n```\nThe deserialized bindings are then used to create a new `MouseBindings` instance.", "golden_doc_uuids": ["e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7"], "golden_chunk_uuids": [["e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", 0]], "golden_documents": [{"uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize(deserializer: D) -> Result\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/mouse.rs", "repo_name": "alacritty/alacritty", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "index": 0, "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize(deserializer: D) -> Result\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"hash_id": "b07d71ee193a427e34f9c39fb90add91ea49b318a00657a90cd201a35c891dee"}}]}], "golden_chunks": [{"doc_uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "index": 0, "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n 
Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize(deserializer: D) -> Result\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"hash_id": "b07d71ee193a427e34f9c39fb90add91ea49b318a00657a90cd201a35c891dee"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/mouse.rs", "repo_name": "alacritty/alacritty", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `MouseBinding` type?", "answer": "The `MouseBinding` type is likely used to represent an individual mouse binding configuration. It is imported from the `alacritty_config::config::bindings` module at the top of the file:\n```rust\nuse crate::config::bindings::{self, MouseBinding};\n```\nThe exact structure and purpose of `MouseBinding` are not shown in the provided code snippet, but it is used as the element type of the `Vec` inside the `MouseBindings` struct.", "golden_doc_uuids": ["e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7"], "golden_chunk_uuids": [["e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", 0]], "golden_documents": [{"uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize(deserializer: D) -> Result\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/mouse.rs", "repo_name": "alacritty/alacritty", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "index": 0, "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize(deserializer: D) -> Result\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"hash_id": "b07d71ee193a427e34f9c39fb90add91ea49b318a00657a90cd201a35c891dee"}}]}], "golden_chunks": [{"doc_uuid": "e42de8de90207977280409db092d558c129d6d22af59b93274b588900fa6b3f7", "index": 0, "content": "use serde::{Deserialize, Deserializer};\n\nuse alacritty_config_derive::{ConfigDeserialize, SerdeReplace};\n\nuse crate::config::bindings::{self, MouseBinding};\nuse 
crate::config::ui_config;\n\n#[derive(ConfigDeserialize, Default, Clone, Debug, PartialEq, Eq)]\npub struct Mouse {\n pub hide_when_typing: bool,\n pub bindings: MouseBindings,\n}\n\n#[derive(SerdeReplace, Clone, Debug, PartialEq, Eq)]\npub struct MouseBindings(pub Vec);\n\nimpl Default for MouseBindings {\n fn default() -> Self {\n Self(bindings::default_mouse_bindings())\n }\n}\n\nimpl<'de> Deserialize<'de> for MouseBindings {\n fn deserialize(deserializer: D) -> Result\n where\n D: Deserializer<'de>,\n {\n Ok(Self(ui_config::deserialize_bindings(deserializer, Self::default().0)?))\n }\n}\n", "meta": {"hash_id": "b07d71ee193a427e34f9c39fb90add91ea49b318a00657a90cd201a35c891dee"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/mouse.rs", "repo_name": "alacritty/alacritty", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `new_nop` method?", "answer": "The `new_nop` method is used for tests and to handle missing clipboard providers when the crate is built without the \"x11\" feature. It creates a `Clipboard` instance with a `NopClipboardContext` for the `clipboard` field and `None` for the `selection` field. This can be seen in the method definition:\n```rust\n#[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\npub fn new_nop() -> Self {\n Self { clipboard: Box::new(NopClipboardContext::new().unwrap()), selection: None }\n}\n```", "golden_doc_uuids": ["dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578"], "golden_chunk_uuids": [["dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578", 2]], "golden_documents": [{"uuid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578", "content": "use log::{debug, warn};\nuse raw_window_handle::RawDisplayHandle;\n\nuse alacritty_terminal::term::ClipboardType;\n\n#[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\nuse copypasta::nop_clipboard::NopClipboardContext;\n#[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::wayland_clipboard;\n#[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::x11_clipboard::{Primary as X11SelectionClipboard, X11ClipboardContext};\n#[cfg(any(feature = \"x11\", target_os = \"macos\", windows))]\nuse copypasta::ClipboardContext;\nuse copypasta::ClipboardProvider;\n\npub struct Clipboard {\n clipboard: Box,\n selection: Option>,\n}\n\nimpl Clipboard {\n pub unsafe fn new(display: RawDisplayHandle) -> Self {\n match display {\n #[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\n RawDisplayHandle::Wayland(display) => {\n let (selection, clipboard) =\n wayland_clipboard::create_clipboards_from_external(display.display);\n Self { clipboard: Box::new(clipboard), selection: Some(Box::new(selection)) }\n },\n _ => Self::default(),\n }\n }\n\n /// Used for tests and to handle missing clipboard provider when built without the `x11`\n /// feature.\n #[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\n pub fn new_nop() -> Self {\n Self { clipboard: Box::new(NopClipboardContext::new().unwrap()), selection: None }\n }\n}\n\nimpl Default for Clipboard {\n fn default() -> Self {\n #[cfg(any(target_os = \"macos\", windows))]\n return Self { clipboard: Box::new(ClipboardContext::new().unwrap()), selection: None };\n\n #[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\n return Self {\n clipboard: 
Box::new(ClipboardContext::new().unwrap()),\n selection: Some(Box::new(X11ClipboardContext::<X11SelectionClipboard>::new().unwrap())),\n };\n\n #[cfg(not(any(feature = \"x11\", target_os = \"macos\", windows)))]\n return Self::new_nop();\n }\n}\n\nimpl Clipboard {\n pub fn store(&mut self, ty: ClipboardType, text: impl Into<String>) {\n let clipboard = match (ty, &mut self.selection) {\n (ClipboardType::Selection, Some(provider)) => provider,\n (ClipboardType::Selection, None) => return,\n _ => &mut self.clipboard,\n };\n\n clipboard.set_contents(text.into()).unwrap_or_else(|err| {\n warn!(\"Unable to store text in clipboard: {}\", err);\n });\n }\n\n pub fn load(&mut self, ty: ClipboardType) -> String {\n let clipboard = match (ty, &mut self.selection) {\n (ClipboardType::Selection, Some(provider)) => provider,\n _ => &mut self.clipboard,\n };\n\n match clipboard.get_contents() {\n Err(err) => {\n debug!(\"Unable to load text from clipboard: {}\", err);\n String::new()\n },\n Ok(text) => text,\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/clipboard.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578", "index": 0, "content": "use log::{debug, warn};\nuse raw_window_handle::RawDisplayHandle;\n\nuse alacritty_terminal::term::ClipboardType;\n\n#[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\nuse copypasta::nop_clipboard::NopClipboardContext;\n#[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::wayland_clipboard;\n#[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\nuse copypasta::x11_clipboard::{Primary as X11SelectionClipboard, X11ClipboardContext};\n#[cfg(any(feature = \"x11\", target_os = \"macos\", windows))]\nuse copypasta::ClipboardContext;\nuse copypasta::ClipboardProvider;\n\n", "meta": {"hash_id": "9e4893ff2a7879e27437774a6a34d35d2890522ffc1783f7ec69548ec5bfabf3"}}, {"doc_uuid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578", "index": 1, "content": "pub struct Clipboard {\n clipboard: Box<dyn ClipboardProvider>,\n selection: Option<Box<dyn ClipboardProvider>>,\n}\n\nimpl Clipboard {\n pub unsafe fn new(display: RawDisplayHandle) -> Self {\n match display {\n #[cfg(all(feature = \"wayland\", not(any(target_os = \"macos\", windows))))]\n RawDisplayHandle::Wayland(display) => {\n let (selection, clipboard) =\n wayland_clipboard::create_clipboards_from_external(display.display);\n Self { clipboard: Box::new(clipboard), selection: Some(Box::new(selection)) }\n },\n _ => Self::default(),\n }\n }\n\n", "meta": {"hash_id": "1b2b631626fd685f5ca6f4b2b4e6280a4dc71107233cd646074a1ccf2f0e0c92"}}, {"doc_uuid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578", "index": 2, "content": " /// Used for tests and to handle missing clipboard provider when built without the `x11`\n /// feature.\n #[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\n pub fn new_nop() -> Self {\n Self { clipboard: Box::new(NopClipboardContext::new().unwrap()), selection: None }\n }\n}\n\nimpl Default for Clipboard {\n fn default() -> Self {\n #[cfg(any(target_os = \"macos\", windows))]\n return Self { clipboard: Box::new(ClipboardContext::new().unwrap()), selection: None };\n\n", "meta": {"hash_id": "c73408fc1577309257c33563f422404cd9e5a17004e817a539530f9841e89f89"}}, {"doc_uuid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578", "index": 3, "content": " #[cfg(all(feature = 
\"x11\", not(any(target_os = \"macos\", windows))))]\n return Self {\n clipboard: Box::new(ClipboardContext::new().unwrap()),\n selection: Some(Box::new(X11ClipboardContext::<X11SelectionClipboard>::new().unwrap())),\n };\n\n #[cfg(not(any(feature = \"x11\", target_os = \"macos\", windows)))]\n return Self::new_nop();\n }\n}\n\nimpl Clipboard {\n pub fn store(&mut self, ty: ClipboardType, text: impl Into<String>) {\n let clipboard = match (ty, &mut self.selection) {\n (ClipboardType::Selection, Some(provider)) => provider,\n (ClipboardType::Selection, None) => return,\n _ => &mut self.clipboard,\n };\n\n", "meta": {"hash_id": "b7dfd0b7c9961971902124fabc327a2355c86f3a11acb00aa2ae11fd4d146a05"}}, {"doc_uuid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578", "index": 4, "content": " clipboard.set_contents(text.into()).unwrap_or_else(|err| {\n warn!(\"Unable to store text in clipboard: {}\", err);\n });\n }\n\n pub fn load(&mut self, ty: ClipboardType) -> String {\n let clipboard = match (ty, &mut self.selection) {\n (ClipboardType::Selection, Some(provider)) => provider,\n _ => &mut self.clipboard,\n };\n\n match clipboard.get_contents() {\n Err(err) => {\n debug!(\"Unable to load text from clipboard: {}\", err);\n String::new()\n },\n Ok(text) => text,\n }\n }\n}\n", "meta": {"hash_id": "842184dad4f9deb026038766668495cf6934cd5c4662519d90d91d10234c2830"}}]}], "golden_chunks": [{"doc_uuid": "dda576d7cb16763f392463733d36dc4b71bca1b3b20c410732652db82fc54578", "index": 2, "content": " /// Used for tests and to handle missing clipboard provider when built without the `x11`\n /// feature.\n #[cfg(any(test, not(any(feature = \"x11\", target_os = \"macos\", windows))))]\n pub fn new_nop() -> Self {\n Self { clipboard: Box::new(NopClipboardContext::new().unwrap()), selection: None }\n }\n}\n\nimpl Default for Clipboard {\n fn default() -> Self {\n #[cfg(any(target_os = \"macos\", windows))]\n return Self { clipboard: Box::new(ClipboardContext::new().unwrap()), selection: None };\n\n", "meta": {"hash_id": "c73408fc1577309257c33563f422404cd9e5a17004e817a539530f9841e89f89"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/clipboard.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "How does the Scheduler schedule a new event?", "answer": "The Scheduler provides a schedule method to schedule a new event. It takes the event, interval, repeat flag, and timer_id as parameters. The method calculates the deadline by adding the interval to the current instant. It then finds the appropriate position in the timers queue to insert the new timer based on the deadline. If repeat is true, the interval is stored in the Timer struct for automatic event repetition.", "golden_doc_uuids": ["2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54"], "golden_chunk_uuids": [["2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", 2]], "golden_documents": [{"uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "content": "//! 
Scheduler for emitting events at a specific time in the future.\n\nuse std::collections::VecDeque;\nuse std::time::{Duration, Instant};\n\nuse winit::event_loop::EventLoopProxy;\nuse winit::window::WindowId;\n\nuse crate::event::Event;\n\n/// ID uniquely identifying a timer.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub struct TimerId {\n    topic: Topic,\n    window_id: WindowId,\n}\n\nimpl TimerId {\n    pub fn new(topic: Topic, window_id: WindowId) -> Self {\n        Self { topic, window_id }\n    }\n}\n\n/// Available timer topics.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub enum Topic {\n    SelectionScrolling,\n    DelayedSearch,\n    BlinkCursor,\n    BlinkTimeout,\n    Frame,\n}\n\n/// Event scheduled to be emitted at a specific time.\npub struct Timer {\n    pub deadline: Instant,\n    pub event: Event,\n    pub id: TimerId,\n\n    interval: Option<Duration>,\n}\n\n/// Scheduler tracking all pending timers.\npub struct Scheduler {\n    timers: VecDeque<Timer>,\n    event_proxy: EventLoopProxy<Event>,\n}\n\nimpl Scheduler {\n    pub fn new(event_proxy: EventLoopProxy<Event>) -> Self {\n        Self { timers: VecDeque::new(), event_proxy }\n    }\n\n    /// Process all pending timers.\n    ///\n    /// If there are still timers pending after all ready events have been processed, the closest\n    /// pending deadline will be returned.\n    pub fn update(&mut self) -> Option<Instant> {\n        let now = Instant::now();\n\n        while !self.timers.is_empty() && self.timers[0].deadline <= now {\n            if let Some(timer) = self.timers.pop_front() {\n                // Automatically repeat the event.\n                if let Some(interval) = timer.interval {\n                    self.schedule(timer.event.clone(), interval, true, timer.id);\n                }\n\n                let _ = self.event_proxy.send_event(timer.event);\n            }\n        }\n\n        self.timers.front().map(|timer| timer.deadline)\n    }\n\n    /// Schedule a new event.\n    pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n        let deadline = Instant::now() + interval;\n\n        // Get insert position in the schedule.\n        let index = self\n            .timers\n            .iter()\n            .position(|timer| timer.deadline > deadline)\n            .unwrap_or(self.timers.len());\n\n        // Set the automatic event repeat rate.\n        let interval = if repeat { Some(interval) } else { None };\n\n        self.timers.insert(index, Timer { interval, deadline, event, id: timer_id });\n    }\n\n    /// Cancel a scheduled event.\n    pub fn unschedule(&mut self, id: TimerId) -> Option<Timer> {\n        let index = self.timers.iter().position(|timer| timer.id == id)?;\n        self.timers.remove(index)\n    }\n\n    /// Check if a timer is already scheduled.\n    pub fn scheduled(&mut self, id: TimerId) -> bool {\n        self.timers.iter().any(|timer| timer.id == id)\n    }\n\n    /// Remove all timers scheduled for a window.\n    ///\n    /// This must be called when a window is removed to ensure that timers on intervals do not\n    /// stick around forever and cause a memory leak.\n    pub fn unschedule_window(&mut self, window_id: WindowId) {\n        self.timers.retain(|timer| timer.id.window_id != window_id);\n    }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/scheduler.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 0, "content": "//! 
Scheduler for emitting events at a specific time in the future.\n\nuse std::collections::VecDeque;\nuse std::time::{Duration, Instant};\n\nuse winit::event_loop::EventLoopProxy;\nuse winit::window::WindowId;\n\nuse crate::event::Event;\n\n/// ID uniquely identifying a timer.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub struct TimerId {\n    topic: Topic,\n    window_id: WindowId,\n}\n\nimpl TimerId {\n    pub fn new(topic: Topic, window_id: WindowId) -> Self {\n        Self { topic, window_id }\n    }\n}\n\n/// Available timer topics.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub enum Topic {\n    SelectionScrolling,\n    DelayedSearch,\n    BlinkCursor,\n    BlinkTimeout,\n    Frame,\n}\n\n/// Event scheduled to be emitted at a specific time.\npub struct Timer {\n    pub deadline: Instant,\n    pub event: Event,\n    pub id: TimerId,\n\n", "meta": {"hash_id": "aa0e1abb65149a6f51a728f0084277bc7357283e303a67ab8d2222e3a8008ff0"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 1, "content": "    interval: Option<Duration>,\n}\n\n/// Scheduler tracking all pending timers.\npub struct Scheduler {\n    timers: VecDeque<Timer>,\n    event_proxy: EventLoopProxy<Event>,\n}\n\nimpl Scheduler {\n    pub fn new(event_proxy: EventLoopProxy<Event>) -> Self {\n        Self { timers: VecDeque::new(), event_proxy }\n    }\n\n    /// Process all pending timers.\n    ///\n    /// If there are still timers pending after all ready events have been processed, the closest\n    /// pending deadline will be returned.\n    pub fn update(&mut self) -> Option<Instant> {\n        let now = Instant::now();\n\n", "meta": {"hash_id": "4f200f1b70fd778d55d42740aa8a87f3093280fc41bbe3b58b6a8b56c28cdc8f"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 2, "content": "        while !self.timers.is_empty() && self.timers[0].deadline <= now {\n            if let Some(timer) = self.timers.pop_front() {\n                // Automatically repeat the event.\n                if let Some(interval) = timer.interval {\n                    self.schedule(timer.event.clone(), interval, true, timer.id);\n                }\n\n                let _ = self.event_proxy.send_event(timer.event);\n            }\n        }\n\n        self.timers.front().map(|timer| timer.deadline)\n    }\n\n    /// Schedule a new event.\n    pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n        let deadline = Instant::now() + interval;\n\n", "meta": {"hash_id": "99859196f4ea7e5e88915dea98d49ec6ec8cb9878a252184300afa63ffbc9aee"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 3, "content": "        // Get insert position in the schedule.\n        let index = self\n            .timers\n            .iter()\n            .position(|timer| timer.deadline > deadline)\n            .unwrap_or(self.timers.len());\n\n        // Set the automatic event repeat rate.\n        let interval = if repeat { Some(interval) } else { None };\n\n        self.timers.insert(index, Timer { interval, deadline, event, id: timer_id });\n    }\n\n", "meta": {"hash_id": "741abd5967053867beaa1314dd8137b2582efe7e72217e5b92ba6fe546d9e5b8"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 4, "content": "    /// Cancel a scheduled event.\n    pub fn unschedule(&mut self, id: TimerId) -> Option<Timer> {\n        let index = self.timers.iter().position(|timer| timer.id == id)?;\n        self.timers.remove(index)\n    }\n\n    /// Check if a timer is already scheduled.\n    pub fn scheduled(&mut self, id: TimerId) -> bool {\n        self.timers.iter().any(|timer| timer.id == id)\n    }\n\n    /// Remove all timers scheduled for a window.\n    ///\n    /// This must be called when a window is removed to ensure that timers on intervals do not\n    /// stick around 
forever and cause a memory leak.\n pub fn unschedule_window(&mut self, window_id: WindowId) {\n self.timers.retain(|timer| timer.id.window_id != window_id);\n }\n}\n", "meta": {"hash_id": "21529aea2a855b520a12a81dc43376100565afca4655d818c814e3b062ff4011"}}]}], "golden_chunks": [{"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 2, "content": " while !self.timers.is_empty() && self.timers[0].deadline <= now {\n if let Some(timer) = self.timers.pop_front() {\n // Automatically repeat the event.\n if let Some(interval) = timer.interval {\n self.schedule(timer.event.clone(), interval, true, timer.id);\n }\n\n let _ = self.event_proxy.send_event(timer.event);\n }\n }\n\n self.timers.front().map(|timer| timer.deadline)\n }\n\n /// Schedule a new event.\n pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n let deadline = Instant::now() + interval;\n\n", "meta": {"hash_id": "99859196f4ea7e5e88915dea98d49ec6ec8cb9878a252184300afa63ffbc9aee"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/scheduler.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "How can you cancel a scheduled event?", "answer": "The Scheduler provides an unschedule method to cancel a scheduled event. It takes the timer_id as a parameter and searches for the corresponding timer in the timers queue using the timer.id. If a matching timer is found, it is removed from the queue and returned. If no matching timer is found, None is returned.", "golden_doc_uuids": ["2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54"], "golden_chunk_uuids": [["2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", 4]], "golden_documents": [{"uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "content": "//! 
Scheduler for emitting events at a specific time in the future.\n\nuse std::collections::VecDeque;\nuse std::time::{Duration, Instant};\n\nuse winit::event_loop::EventLoopProxy;\nuse winit::window::WindowId;\n\nuse crate::event::Event;\n\n/// ID uniquely identifying a timer.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub struct TimerId {\n    topic: Topic,\n    window_id: WindowId,\n}\n\nimpl TimerId {\n    pub fn new(topic: Topic, window_id: WindowId) -> Self {\n        Self { topic, window_id }\n    }\n}\n\n/// Available timer topics.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub enum Topic {\n    SelectionScrolling,\n    DelayedSearch,\n    BlinkCursor,\n    BlinkTimeout,\n    Frame,\n}\n\n/// Event scheduled to be emitted at a specific time.\npub struct Timer {\n    pub deadline: Instant,\n    pub event: Event,\n    pub id: TimerId,\n\n    interval: Option<Duration>,\n}\n\n/// Scheduler tracking all pending timers.\npub struct Scheduler {\n    timers: VecDeque<Timer>,\n    event_proxy: EventLoopProxy<Event>,\n}\n\nimpl Scheduler {\n    pub fn new(event_proxy: EventLoopProxy<Event>) -> Self {\n        Self { timers: VecDeque::new(), event_proxy }\n    }\n\n    /// Process all pending timers.\n    ///\n    /// If there are still timers pending after all ready events have been processed, the closest\n    /// pending deadline will be returned.\n    pub fn update(&mut self) -> Option<Instant> {\n        let now = Instant::now();\n\n        while !self.timers.is_empty() && self.timers[0].deadline <= now {\n            if let Some(timer) = self.timers.pop_front() {\n                // Automatically repeat the event.\n                if let Some(interval) = timer.interval {\n                    self.schedule(timer.event.clone(), interval, true, timer.id);\n                }\n\n                let _ = self.event_proxy.send_event(timer.event);\n            }\n        }\n\n        self.timers.front().map(|timer| timer.deadline)\n    }\n\n    /// Schedule a new event.\n    pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n        let deadline = Instant::now() + interval;\n\n        // Get insert position in the schedule.\n        let index = self\n            .timers\n            .iter()\n            .position(|timer| timer.deadline > deadline)\n            .unwrap_or(self.timers.len());\n\n        // Set the automatic event repeat rate.\n        let interval = if repeat { Some(interval) } else { None };\n\n        self.timers.insert(index, Timer { interval, deadline, event, id: timer_id });\n    }\n\n    /// Cancel a scheduled event.\n    pub fn unschedule(&mut self, id: TimerId) -> Option<Timer> {\n        let index = self.timers.iter().position(|timer| timer.id == id)?;\n        self.timers.remove(index)\n    }\n\n    /// Check if a timer is already scheduled.\n    pub fn scheduled(&mut self, id: TimerId) -> bool {\n        self.timers.iter().any(|timer| timer.id == id)\n    }\n\n    /// Remove all timers scheduled for a window.\n    ///\n    /// This must be called when a window is removed to ensure that timers on intervals do not\n    /// stick around forever and cause a memory leak.\n    pub fn unschedule_window(&mut self, window_id: WindowId) {\n        self.timers.retain(|timer| timer.id.window_id != window_id);\n    }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/scheduler.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 0, "content": "//! 
Scheduler for emitting events at a specific time in the future.\n\nuse std::collections::VecDeque;\nuse std::time::{Duration, Instant};\n\nuse winit::event_loop::EventLoopProxy;\nuse winit::window::WindowId;\n\nuse crate::event::Event;\n\n/// ID uniquely identifying a timer.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub struct TimerId {\n    topic: Topic,\n    window_id: WindowId,\n}\n\nimpl TimerId {\n    pub fn new(topic: Topic, window_id: WindowId) -> Self {\n        Self { topic, window_id }\n    }\n}\n\n/// Available timer topics.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub enum Topic {\n    SelectionScrolling,\n    DelayedSearch,\n    BlinkCursor,\n    BlinkTimeout,\n    Frame,\n}\n\n/// Event scheduled to be emitted at a specific time.\npub struct Timer {\n    pub deadline: Instant,\n    pub event: Event,\n    pub id: TimerId,\n\n", "meta": {"hash_id": "aa0e1abb65149a6f51a728f0084277bc7357283e303a67ab8d2222e3a8008ff0"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 1, "content": "    interval: Option<Duration>,\n}\n\n/// Scheduler tracking all pending timers.\npub struct Scheduler {\n    timers: VecDeque<Timer>,\n    event_proxy: EventLoopProxy<Event>,\n}\n\nimpl Scheduler {\n    pub fn new(event_proxy: EventLoopProxy<Event>) -> Self {\n        Self { timers: VecDeque::new(), event_proxy }\n    }\n\n    /// Process all pending timers.\n    ///\n    /// If there are still timers pending after all ready events have been processed, the closest\n    /// pending deadline will be returned.\n    pub fn update(&mut self) -> Option<Instant> {\n        let now = Instant::now();\n\n", "meta": {"hash_id": "4f200f1b70fd778d55d42740aa8a87f3093280fc41bbe3b58b6a8b56c28cdc8f"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 2, "content": "        while !self.timers.is_empty() && self.timers[0].deadline <= now {\n            if let Some(timer) = self.timers.pop_front() {\n                // Automatically repeat the event.\n                if let Some(interval) = timer.interval {\n                    self.schedule(timer.event.clone(), interval, true, timer.id);\n                }\n\n                let _ = self.event_proxy.send_event(timer.event);\n            }\n        }\n\n        self.timers.front().map(|timer| timer.deadline)\n    }\n\n    /// Schedule a new event.\n    pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n        let deadline = Instant::now() + interval;\n\n", "meta": {"hash_id": "99859196f4ea7e5e88915dea98d49ec6ec8cb9878a252184300afa63ffbc9aee"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 3, "content": "        // Get insert position in the schedule.\n        let index = self\n            .timers\n            .iter()\n            .position(|timer| timer.deadline > deadline)\n            .unwrap_or(self.timers.len());\n\n        // Set the automatic event repeat rate.\n        let interval = if repeat { Some(interval) } else { None };\n\n        self.timers.insert(index, Timer { interval, deadline, event, id: timer_id });\n    }\n\n", "meta": {"hash_id": "741abd5967053867beaa1314dd8137b2582efe7e72217e5b92ba6fe546d9e5b8"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 4, "content": "    /// Cancel a scheduled event.\n    pub fn unschedule(&mut self, id: TimerId) -> Option<Timer> {\n        let index = self.timers.iter().position(|timer| timer.id == id)?;\n        self.timers.remove(index)\n    }\n\n    /// Check if a timer is already scheduled.\n    pub fn scheduled(&mut self, id: TimerId) -> bool {\n        self.timers.iter().any(|timer| timer.id == id)\n    }\n\n    /// Remove all timers scheduled for a window.\n    ///\n    /// This must be called when a window is removed to ensure that timers on intervals do not\n    /// stick around 
forever and cause a memory leak.\n    pub fn unschedule_window(&mut self, window_id: WindowId) {\n        self.timers.retain(|timer| timer.id.window_id != window_id);\n    }\n}\n", "meta": {"hash_id": "21529aea2a855b520a12a81dc43376100565afca4655d818c814e3b062ff4011"}}]}], "golden_chunks": [{"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 4, "content": "    /// Cancel a scheduled event.\n    pub fn unschedule(&mut self, id: TimerId) -> Option<Timer> {\n        let index = self.timers.iter().position(|timer| timer.id == id)?;\n        self.timers.remove(index)\n    }\n\n    /// Check if a timer is already scheduled.\n    pub fn scheduled(&mut self, id: TimerId) -> bool {\n        self.timers.iter().any(|timer| timer.id == id)\n    }\n\n    /// Remove all timers scheduled for a window.\n    ///\n    /// This must be called when a window is removed to ensure that timers on intervals do not\n    /// stick around forever and cause a memory leak.\n    pub fn unschedule_window(&mut self, window_id: WindowId) {\n        self.timers.retain(|timer| timer.id.window_id != window_id);\n    }\n}\n", "meta": {"hash_id": "21529aea2a855b520a12a81dc43376100565afca4655d818c814e3b062ff4011"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/scheduler.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}}
+{"query": "How can you check if a timer is already scheduled?", "answer": "The Scheduler provides a scheduled method to check if a timer with a specific TimerId is already scheduled. It takes the id as a parameter and iterates through the timers queue using timers.iter(). It returns true if any timer's id matches the provided id, indicating that the timer is already scheduled. Otherwise, it returns false.", "golden_doc_uuids": ["2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54"], "golden_chunk_uuids": [["2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", 3], ["2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", 2], ["2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", 1], ["2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", 0]], "golden_documents": [{"uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "content": "//! 
Scheduler for emitting events at a specific time in the future.\n\nuse std::collections::VecDeque;\nuse std::time::{Duration, Instant};\n\nuse winit::event_loop::EventLoopProxy;\nuse winit::window::WindowId;\n\nuse crate::event::Event;\n\n/// ID uniquely identifying a timer.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub struct TimerId {\n    topic: Topic,\n    window_id: WindowId,\n}\n\nimpl TimerId {\n    pub fn new(topic: Topic, window_id: WindowId) -> Self {\n        Self { topic, window_id }\n    }\n}\n\n/// Available timer topics.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub enum Topic {\n    SelectionScrolling,\n    DelayedSearch,\n    BlinkCursor,\n    BlinkTimeout,\n    Frame,\n}\n\n/// Event scheduled to be emitted at a specific time.\npub struct Timer {\n    pub deadline: Instant,\n    pub event: Event,\n    pub id: TimerId,\n\n    interval: Option<Duration>,\n}\n\n/// Scheduler tracking all pending timers.\npub struct Scheduler {\n    timers: VecDeque<Timer>,\n    event_proxy: EventLoopProxy<Event>,\n}\n\nimpl Scheduler {\n    pub fn new(event_proxy: EventLoopProxy<Event>) -> Self {\n        Self { timers: VecDeque::new(), event_proxy }\n    }\n\n    /// Process all pending timers.\n    ///\n    /// If there are still timers pending after all ready events have been processed, the closest\n    /// pending deadline will be returned.\n    pub fn update(&mut self) -> Option<Instant> {\n        let now = Instant::now();\n\n        while !self.timers.is_empty() && self.timers[0].deadline <= now {\n            if let Some(timer) = self.timers.pop_front() {\n                // Automatically repeat the event.\n                if let Some(interval) = timer.interval {\n                    self.schedule(timer.event.clone(), interval, true, timer.id);\n                }\n\n                let _ = self.event_proxy.send_event(timer.event);\n            }\n        }\n\n        self.timers.front().map(|timer| timer.deadline)\n    }\n\n    /// Schedule a new event.\n    pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n        let deadline = Instant::now() + interval;\n\n        // Get insert position in the schedule.\n        let index = self\n            .timers\n            .iter()\n            .position(|timer| timer.deadline > deadline)\n            .unwrap_or(self.timers.len());\n\n        // Set the automatic event repeat rate.\n        let interval = if repeat { Some(interval) } else { None };\n\n        self.timers.insert(index, Timer { interval, deadline, event, id: timer_id });\n    }\n\n    /// Cancel a scheduled event.\n    pub fn unschedule(&mut self, id: TimerId) -> Option<Timer> {\n        let index = self.timers.iter().position(|timer| timer.id == id)?;\n        self.timers.remove(index)\n    }\n\n    /// Check if a timer is already scheduled.\n    pub fn scheduled(&mut self, id: TimerId) -> bool {\n        self.timers.iter().any(|timer| timer.id == id)\n    }\n\n    /// Remove all timers scheduled for a window.\n    ///\n    /// This must be called when a window is removed to ensure that timers on intervals do not\n    /// stick around forever and cause a memory leak.\n    pub fn unschedule_window(&mut self, window_id: WindowId) {\n        self.timers.retain(|timer| timer.id.window_id != window_id);\n    }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/scheduler.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 0, "content": "//! 
Scheduler for emitting events at a specific time in the future.\n\nuse std::collections::VecDeque;\nuse std::time::{Duration, Instant};\n\nuse winit::event_loop::EventLoopProxy;\nuse winit::window::WindowId;\n\nuse crate::event::Event;\n\n/// ID uniquely identifying a timer.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub struct TimerId {\n    topic: Topic,\n    window_id: WindowId,\n}\n\nimpl TimerId {\n    pub fn new(topic: Topic, window_id: WindowId) -> Self {\n        Self { topic, window_id }\n    }\n}\n\n/// Available timer topics.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub enum Topic {\n    SelectionScrolling,\n    DelayedSearch,\n    BlinkCursor,\n    BlinkTimeout,\n    Frame,\n}\n\n/// Event scheduled to be emitted at a specific time.\npub struct Timer {\n    pub deadline: Instant,\n    pub event: Event,\n    pub id: TimerId,\n\n", "meta": {"hash_id": "aa0e1abb65149a6f51a728f0084277bc7357283e303a67ab8d2222e3a8008ff0"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 1, "content": "    interval: Option<Duration>,\n}\n\n/// Scheduler tracking all pending timers.\npub struct Scheduler {\n    timers: VecDeque<Timer>,\n    event_proxy: EventLoopProxy<Event>,\n}\n\nimpl Scheduler {\n    pub fn new(event_proxy: EventLoopProxy<Event>) -> Self {\n        Self { timers: VecDeque::new(), event_proxy }\n    }\n\n    /// Process all pending timers.\n    ///\n    /// If there are still timers pending after all ready events have been processed, the closest\n    /// pending deadline will be returned.\n    pub fn update(&mut self) -> Option<Instant> {\n        let now = Instant::now();\n\n", "meta": {"hash_id": "4f200f1b70fd778d55d42740aa8a87f3093280fc41bbe3b58b6a8b56c28cdc8f"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 2, "content": "        while !self.timers.is_empty() && self.timers[0].deadline <= now {\n            if let Some(timer) = self.timers.pop_front() {\n                // Automatically repeat the event.\n                if let Some(interval) = timer.interval {\n                    self.schedule(timer.event.clone(), interval, true, timer.id);\n                }\n\n                let _ = self.event_proxy.send_event(timer.event);\n            }\n        }\n\n        self.timers.front().map(|timer| timer.deadline)\n    }\n\n    /// Schedule a new event.\n    pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n        let deadline = Instant::now() + interval;\n\n", "meta": {"hash_id": "99859196f4ea7e5e88915dea98d49ec6ec8cb9878a252184300afa63ffbc9aee"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 3, "content": "        // Get insert position in the schedule.\n        let index = self\n            .timers\n            .iter()\n            .position(|timer| timer.deadline > deadline)\n            .unwrap_or(self.timers.len());\n\n        // Set the automatic event repeat rate.\n        let interval = if repeat { Some(interval) } else { None };\n\n        self.timers.insert(index, Timer { interval, deadline, event, id: timer_id });\n    }\n\n", "meta": {"hash_id": "741abd5967053867beaa1314dd8137b2582efe7e72217e5b92ba6fe546d9e5b8"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 4, "content": "    /// Cancel a scheduled event.\n    pub fn unschedule(&mut self, id: TimerId) -> Option<Timer> {\n        let index = self.timers.iter().position(|timer| timer.id == id)?;\n        self.timers.remove(index)\n    }\n\n    /// Check if a timer is already scheduled.\n    pub fn scheduled(&mut self, id: TimerId) -> bool {\n        self.timers.iter().any(|timer| timer.id == id)\n    }\n\n    /// Remove all timers scheduled for a window.\n    ///\n    /// This must be called when a window is removed to ensure that timers on intervals do not\n    /// stick around 
forever and cause a memory leak.\n    pub fn unschedule_window(&mut self, window_id: WindowId) {\n        self.timers.retain(|timer| timer.id.window_id != window_id);\n    }\n}\n", "meta": {"hash_id": "21529aea2a855b520a12a81dc43376100565afca4655d818c814e3b062ff4011"}}]}], "golden_chunks": [{"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 3, "content": "        // Get insert position in the schedule.\n        let index = self\n            .timers\n            .iter()\n            .position(|timer| timer.deadline > deadline)\n            .unwrap_or(self.timers.len());\n\n        // Set the automatic event repeat rate.\n        let interval = if repeat { Some(interval) } else { None };\n\n        self.timers.insert(index, Timer { interval, deadline, event, id: timer_id });\n    }\n\n", "meta": {"hash_id": "741abd5967053867beaa1314dd8137b2582efe7e72217e5b92ba6fe546d9e5b8"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 2, "content": "        while !self.timers.is_empty() && self.timers[0].deadline <= now {\n            if let Some(timer) = self.timers.pop_front() {\n                // Automatically repeat the event.\n                if let Some(interval) = timer.interval {\n                    self.schedule(timer.event.clone(), interval, true, timer.id);\n                }\n\n                let _ = self.event_proxy.send_event(timer.event);\n            }\n        }\n\n        self.timers.front().map(|timer| timer.deadline)\n    }\n\n    /// Schedule a new event.\n    pub fn schedule(&mut self, event: Event, interval: Duration, repeat: bool, timer_id: TimerId) {\n        let deadline = Instant::now() + interval;\n\n", "meta": {"hash_id": "99859196f4ea7e5e88915dea98d49ec6ec8cb9878a252184300afa63ffbc9aee"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 1, "content": "    interval: Option<Duration>,\n}\n\n/// Scheduler tracking all pending timers.\npub struct Scheduler {\n    timers: VecDeque<Timer>,\n    event_proxy: EventLoopProxy<Event>,\n}\n\nimpl Scheduler {\n    pub fn new(event_proxy: EventLoopProxy<Event>) -> Self {\n        Self { timers: VecDeque::new(), event_proxy }\n    }\n\n    /// Process all pending timers.\n    ///\n    /// If there are still timers pending after all ready events have been processed, the closest\n    /// pending deadline will be returned.\n    pub fn update(&mut self) -> Option<Instant> {\n        let now = Instant::now();\n\n", "meta": {"hash_id": "4f200f1b70fd778d55d42740aa8a87f3093280fc41bbe3b58b6a8b56c28cdc8f"}}, {"doc_uuid": "2fc7d7be4454fcb111754643168b4be9ac632f43826883ec917f5698c5a15f54", "index": 0, "content": "//! 
Scheduler for emitting events at a specific time in the future.\n\nuse std::collections::VecDeque;\nuse std::time::{Duration, Instant};\n\nuse winit::event_loop::EventLoopProxy;\nuse winit::window::WindowId;\n\nuse crate::event::Event;\n\n/// ID uniquely identifying a timer.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub struct TimerId {\n topic: Topic,\n window_id: WindowId,\n}\n\nimpl TimerId {\n pub fn new(topic: Topic, window_id: WindowId) -> Self {\n Self { topic, window_id }\n }\n}\n\n/// Available timer topics.\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\npub enum Topic {\n SelectionScrolling,\n DelayedSearch,\n BlinkCursor,\n BlinkTimeout,\n Frame,\n}\n\n/// Event scheduled to be emitted at a specific time.\npub struct Timer {\n pub deadline: Instant,\n pub event: Event,\n pub id: TimerId,\n\n", "meta": {"hash_id": "aa0e1abb65149a6f51a728f0084277bc7357283e303a67ab8d2222e3a8008ff0"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/scheduler.rs", "repo_name": "alacritty/alacritty", "num_chunks": 5, "source": "codebases/jsonl"}} +{"query": "What does the `attach_handler` function do?", "answer": "The `attach_handler` function installs a custom panic handler using `panic::set_hook`. This panic handler renders the panic message in a Windows error dialog box using the `MessageBoxW` function from the Windows API, and also writes the panic message to `stderr`.", "golden_doc_uuids": ["6e284600be25c8833b866ef0ebfab953a8d0a0f8420cfe56fa17e28664de1b82"], "golden_chunk_uuids": [["6e284600be25c8833b866ef0ebfab953a8d0a0f8420cfe56fa17e28664de1b82", 0]], "golden_documents": [{"uuid": "6e284600be25c8833b866ef0ebfab953a8d0a0f8420cfe56fa17e28664de1b82", "content": "use std::io::Write;\nuse std::{io, panic};\n\nuse windows_sys::Win32::UI::WindowsAndMessaging::{\n MessageBoxW, MB_ICONERROR, MB_OK, MB_SETFOREGROUND, MB_TASKMODAL,\n};\n\nuse alacritty_terminal::tty::windows::win32_string;\n\n// Install a panic handler that renders the panic in a classical Windows error\n// dialog box as well as writes the panic to STDERR.\npub fn attach_handler() {\n panic::set_hook(Box::new(|panic_info| {\n let _ = writeln!(io::stderr(), \"{}\", panic_info);\n let msg = format!(\"{}\\n\\nPress Ctrl-C to Copy\", panic_info);\n unsafe {\n MessageBoxW(\n 0isize,\n win32_string(&msg).as_ptr(),\n win32_string(\"Alacritty: Runtime Error\").as_ptr(),\n MB_ICONERROR | MB_OK | MB_SETFOREGROUND | MB_TASKMODAL,\n );\n }\n }));\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/panic.rs", "repo_name": "alacritty/alacritty", "num_chunks": 1, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "6e284600be25c8833b866ef0ebfab953a8d0a0f8420cfe56fa17e28664de1b82", "index": 0, "content": "use std::io::Write;\nuse std::{io, panic};\n\nuse windows_sys::Win32::UI::WindowsAndMessaging::{\n MessageBoxW, MB_ICONERROR, MB_OK, MB_SETFOREGROUND, MB_TASKMODAL,\n};\n\nuse alacritty_terminal::tty::windows::win32_string;\n\n// Install a panic handler that renders the panic in a classical Windows error\n// dialog box as well as writes the panic to STDERR.\npub fn attach_handler() {\n panic::set_hook(Box::new(|panic_info| {\n let _ = writeln!(io::stderr(), \"{}\", panic_info);\n let msg = format!(\"{}\\n\\nPress Ctrl-C to Copy\", panic_info);\n unsafe {\n MessageBoxW(\n 0isize,\n win32_string(&msg).as_ptr(),\n win32_string(\"Alacritty: Runtime Error\").as_ptr(),\n MB_ICONERROR | MB_OK | MB_SETFOREGROUND | MB_TASKMODAL,\n );\n }\n }));\n}\n", "meta": {"hash_id": 
"1e1cf7e91f6a19e56c1bcb1b69b16afb6f24ff910aad80020487138823ef5b82"}}]}], "golden_chunks": [{"doc_uuid": "6e284600be25c8833b866ef0ebfab953a8d0a0f8420cfe56fa17e28664de1b82", "index": 0, "content": "use std::io::Write;\nuse std::{io, panic};\n\nuse windows_sys::Win32::UI::WindowsAndMessaging::{\n MessageBoxW, MB_ICONERROR, MB_OK, MB_SETFOREGROUND, MB_TASKMODAL,\n};\n\nuse alacritty_terminal::tty::windows::win32_string;\n\n// Install a panic handler that renders the panic in a classical Windows error\n// dialog box as well as writes the panic to STDERR.\npub fn attach_handler() {\n panic::set_hook(Box::new(|panic_info| {\n let _ = writeln!(io::stderr(), \"{}\", panic_info);\n let msg = format!(\"{}\\n\\nPress Ctrl-C to Copy\", panic_info);\n unsafe {\n MessageBoxW(\n 0isize,\n win32_string(&msg).as_ptr(),\n win32_string(\"Alacritty: Runtime Error\").as_ptr(),\n MB_ICONERROR | MB_OK | MB_SETFOREGROUND | MB_TASKMODAL,\n );\n }\n }));\n}\n", "meta": {"hash_id": "1e1cf7e91f6a19e56c1bcb1b69b16afb6f24ff910aad80020487138823ef5b82"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/panic.rs", "repo_name": "alacritty/alacritty", "num_chunks": 1, "source": "codebases/jsonl"}} +{"query": "How do you create a new `Row` instance?", "answer": "You can create a new `Row` instance using the `new` method:\n```rust\nimpl Row {\n pub fn new(columns: usize) -> Row {\n // ...\n }\n}\n```\nIt takes the number of columns as an argument and returns a new `Row` instance with the specified number of columns, initialized with default values of type `T`.", "golden_doc_uuids": ["c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb"], "golden_chunk_uuids": [["c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", 1]], "golden_documents": [{"uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "content": "//! Defines the Row type which makes up lines in the grid.\n\nuse std::cmp::{max, min};\nuse std::ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo, RangeToInclusive};\nuse std::{ptr, slice};\n\n#[cfg(feature = \"serde\")]\nuse serde::{Deserialize, Serialize};\n\nuse crate::grid::GridCell;\nuse crate::index::Column;\nuse crate::term::cell::ResetDiscriminant;\n\n/// A row in the grid.\n#[derive(Default, Clone, Debug)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub struct Row {\n inner: Vec,\n\n /// Maximum number of occupied entries.\n ///\n /// This is the upper bound on the number of elements in the row, which have been modified\n /// since the last reset. 
All cells after this point are guaranteed to be equal.\n    pub(crate) occ: usize,\n}\n\nimpl<T: PartialEq> PartialEq for Row<T> {\n    fn eq(&self, other: &Self) -> bool {\n        self.inner == other.inner\n    }\n}\n\nimpl<T: Clone + Default> Row<T> {\n    /// Create a new terminal row.\n    ///\n    /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.\n    pub fn new(columns: usize) -> Row<T> {\n        debug_assert!(columns >= 1);\n\n        let mut inner: Vec<T> = Vec::with_capacity(columns);\n\n        // This is a slightly optimized version of `std::vec::Vec::resize`.\n        unsafe {\n            let mut ptr = inner.as_mut_ptr();\n\n            for _ in 1..columns {\n                ptr::write(ptr, T::default());\n                ptr = ptr.offset(1);\n            }\n            ptr::write(ptr, T::default());\n\n            inner.set_len(columns);\n        }\n\n        Row { inner, occ: 0 }\n    }\n\n    /// Increase the number of columns in the row.\n    #[inline]\n    pub fn grow(&mut self, columns: usize) {\n        if self.inner.len() >= columns {\n            return;\n        }\n\n        self.inner.resize_with(columns, T::default);\n    }\n\n    /// Reduce the number of columns in the row.\n    ///\n    /// This will return all non-empty cells that were removed.\n    pub fn shrink(&mut self, columns: usize) -> Option<Vec<T>>\n    where\n        T: GridCell,\n    {\n        if self.inner.len() <= columns {\n            return None;\n        }\n\n        // Split off cells for a new row.\n        let mut new_row = self.inner.split_off(columns);\n        let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n        new_row.truncate(index);\n\n        self.occ = min(self.occ, columns);\n\n        if new_row.is_empty() {\n            None\n        } else {\n            Some(new_row)\n        }\n    }\n\n    /// Reset all cells in the row to the `template` cell.\n    #[inline]\n    pub fn reset<D>(&mut self, template: &T)\n    where\n        T: ResetDiscriminant<D> + GridCell,\n        D: PartialEq,\n    {\n        debug_assert!(!self.inner.is_empty());\n\n        // Mark all cells as dirty if template cell changed.\n        let len = self.inner.len();\n        if self.inner[len - 1].discriminant() != template.discriminant() {\n            self.occ = len;\n        }\n\n        // Reset every dirty cell in the row.\n        for item in &mut self.inner[0..self.occ] {\n            item.reset(template);\n        }\n\n        self.occ = 0;\n    }\n}\n\n#[allow(clippy::len_without_is_empty)]\nimpl<T> Row<T> {\n    #[inline]\n    pub fn from_vec(vec: Vec<T>, occ: usize) -> Row<T> {\n        Row { inner: vec, occ }\n    }\n\n    #[inline]\n    pub fn len(&self) -> usize {\n        self.inner.len()\n    }\n\n    #[inline]\n    pub fn last(&self) -> Option<&T> {\n        self.inner.last()\n    }\n\n    #[inline]\n    pub fn last_mut(&mut self) -> Option<&mut T> {\n        self.occ = self.inner.len();\n        self.inner.last_mut()\n    }\n\n    #[inline]\n    pub fn append(&mut self, vec: &mut Vec<T>)\n    where\n        T: GridCell,\n    {\n        self.occ += vec.len();\n        self.inner.append(vec);\n    }\n\n    #[inline]\n    pub fn append_front(&mut self, mut vec: Vec<T>) {\n        self.occ += vec.len();\n\n        vec.append(&mut self.inner);\n        self.inner = vec;\n    }\n\n    /// Check if all cells in the row are empty.\n    #[inline]\n    pub fn is_clear(&self) -> bool\n    where\n        T: GridCell,\n    {\n        self.inner.iter().all(GridCell::is_empty)\n    }\n\n    #[inline]\n    pub fn front_split_off(&mut self, at: usize) -> Vec<T> {\n        self.occ = self.occ.saturating_sub(at);\n\n        let mut split = self.inner.split_off(at);\n        std::mem::swap(&mut split, &mut self.inner);\n        split\n    }\n}\n\nimpl<'a, T> IntoIterator for &'a Row<T> {\n    type IntoIter = slice::Iter<'a, T>;\n    type Item = &'a T;\n\n    #[inline]\n    fn into_iter(self) -> slice::Iter<'a, T> {\n        self.inner.iter()\n    }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Row<T> {\n    type IntoIter = slice::IterMut<'a, T>;\n    type Item = &'a mut T;\n\n    #[inline]\n    fn into_iter(self) -> slice::IterMut<'a, T> {\n        self.occ = self.len();\n        self.inner.iter_mut()\n    }\n}\n\nimpl<T> 
Index<Column> for Row<T> {\n    type Output = T;\n\n    #[inline]\n    fn index(&self, index: Column) -> &T {\n        &self.inner[index.0]\n    }\n}\n\nimpl<T> IndexMut<Column> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: Column) -> &mut T {\n        self.occ = max(self.occ, *index + 1);\n        &mut self.inner[index.0]\n    }\n}\n\nimpl<T> Index<Range<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: Range<Column>) -> &[T] {\n        &self.inner[(index.start.0)..(index.end.0)]\n    }\n}\n\nimpl<T> IndexMut<Range<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: Range<Column>) -> &mut [T] {\n        self.occ = max(self.occ, *index.end);\n        &mut self.inner[(index.start.0)..(index.end.0)]\n    }\n}\n\nimpl<T> Index<RangeTo<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: RangeTo<Column>) -> &[T] {\n        &self.inner[..(index.end.0)]\n    }\n}\n\nimpl<T> IndexMut<RangeTo<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: RangeTo<Column>) -> &mut [T] {\n        self.occ = max(self.occ, *index.end);\n        &mut self.inner[..(index.end.0)]\n    }\n}\n\nimpl<T> Index<RangeFrom<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: RangeFrom<Column>) -> &[T] {\n        &self.inner[(index.start.0)..]\n    }\n}\n\nimpl<T> IndexMut<RangeFrom<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: RangeFrom<Column>) -> &mut [T] {\n        self.occ = self.len();\n        &mut self.inner[(index.start.0)..]\n    }\n}\n\nimpl<T> Index<RangeFull> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, _: RangeFull) -> &[T] {\n        &self.inner[..]\n    }\n}\n\nimpl<T> IndexMut<RangeFull> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, _: RangeFull) -> &mut [T] {\n        self.occ = self.len();\n        &mut self.inner[..]\n    }\n}\n\nimpl<T> Index<RangeToInclusive<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: RangeToInclusive<Column>) -> &[T] {\n        &self.inner[..=(index.end.0)]\n    }\n}\n\nimpl<T> IndexMut<RangeToInclusive<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: RangeToInclusive<Column>) -> &mut [T] {\n        self.occ = max(self.occ, *index.end + 1);\n        &mut self.inner[..=(index.end.0)]\n    }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/grid/row.rs", "repo_name": "alacritty/alacritty", "num_chunks": 12, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 0, "content": "//! Defines the Row type which makes up lines in the grid.\n\nuse std::cmp::{max, min};\nuse std::ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo, RangeToInclusive};\nuse std::{ptr, slice};\n\n#[cfg(feature = \"serde\")]\nuse serde::{Deserialize, Serialize};\n\nuse crate::grid::GridCell;\nuse crate::index::Column;\nuse crate::term::cell::ResetDiscriminant;\n\n/// A row in the grid.\n#[derive(Default, Clone, Debug)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub struct Row<T> {\n    inner: Vec<T>,\n\n", "meta": {"hash_id": "206acb6c42ed0bc1f1fc6e6ca07068f15670de8243ab12b918016fe6e441b937"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 1, "content": "    /// Maximum number of occupied entries.\n    ///\n    /// This is the upper bound on the number of elements in the row, which have been modified\n    /// since the last reset. 
All cells after this point are guaranteed to be equal.\n    pub(crate) occ: usize,\n}\n\nimpl<T: PartialEq> PartialEq for Row<T> {\n    fn eq(&self, other: &Self) -> bool {\n        self.inner == other.inner\n    }\n}\n\nimpl<T: Clone + Default> Row<T> {\n    /// Create a new terminal row.\n    ///\n    /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.\n    pub fn new(columns: usize) -> Row<T> {\n        debug_assert!(columns >= 1);\n\n", "meta": {"hash_id": "693436a512db494bf3f09eb22c35187a110c59469888afdd32c50e4410f75922"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 2, "content": "        let mut inner: Vec<T> = Vec::with_capacity(columns);\n\n        // This is a slightly optimized version of `std::vec::Vec::resize`.\n        unsafe {\n            let mut ptr = inner.as_mut_ptr();\n\n            for _ in 1..columns {\n                ptr::write(ptr, T::default());\n                ptr = ptr.offset(1);\n            }\n            ptr::write(ptr, T::default());\n\n            inner.set_len(columns);\n        }\n\n        Row { inner, occ: 0 }\n    }\n\n", "meta": {"hash_id": "b6f0a31209a99a487d56c6b214b1160fcdd9b7f4f5eabb54945e49c94f58a67c"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 3, "content": "    /// Increase the number of columns in the row.\n    #[inline]\n    pub fn grow(&mut self, columns: usize) {\n        if self.inner.len() >= columns {\n            return;\n        }\n\n        self.inner.resize_with(columns, T::default);\n    }\n\n    /// Reduce the number of columns in the row.\n    ///\n    /// This will return all non-empty cells that were removed.\n    pub fn shrink(&mut self, columns: usize) -> Option<Vec<T>>\n    where\n        T: GridCell,\n    {\n        if self.inner.len() <= columns {\n            return None;\n        }\n\n        // Split off cells for a new row.\n        let mut new_row = self.inner.split_off(columns);\n        let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n        new_row.truncate(index);\n\n        self.occ = min(self.occ, columns);\n\n        if new_row.is_empty() {\n            None\n        } else {\n            Some(new_row)\n        }\n    }\n\n", "meta": {"hash_id": "9d4dbb10ff4a8dc95f9f9a898123474633e98812f57b4f6b7f8225ea487f088b"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 4, "content": "    /// Reset all cells in the row to the `template` cell.\n    #[inline]\n    pub fn reset<D>(&mut self, template: &T)\n    where\n        T: ResetDiscriminant<D> + GridCell,\n        D: PartialEq,\n    {\n        debug_assert!(!self.inner.is_empty());\n\n        // Mark all cells as dirty if template cell changed.\n        let len = self.inner.len();\n        if self.inner[len - 1].discriminant() != template.discriminant() {\n            self.occ = len;\n        }\n\n        // Reset every dirty cell in the row.\n        for item in &mut self.inner[0..self.occ] {\n            item.reset(template);\n        }\n\n        self.occ = 0;\n    }\n}\n\n#[allow(clippy::len_without_is_empty)]\nimpl<T> Row<T> {\n    #[inline]\n    pub fn from_vec(vec: Vec<T>, occ: usize) -> Row<T> {\n        Row { inner: vec, occ }\n    }\n\n", "meta": {"hash_id": "ce3476267ff1e8bdce313ae074d59d86efe67ff72394dbd2933060bdad6c4421"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 5, "content": "    #[inline]\n    pub fn len(&self) -> usize {\n        self.inner.len()\n    }\n\n    #[inline]\n    pub fn last(&self) -> Option<&T> {\n        self.inner.last()\n    }\n\n    #[inline]\n    pub fn last_mut(&mut self) -> Option<&mut T> {\n        self.occ = self.inner.len();\n        self.inner.last_mut()\n    }\n\n    #[inline]\n    pub fn append(&mut self, vec: &mut Vec<T>)\n    where\n        T: GridCell,\n    {\n        self.occ += vec.len();\n        self.inner.append(vec);\n    }\n\n    #[inline]\n    pub fn append_front(&mut self, mut vec: Vec<T>) {\n        self.occ += vec.len();\n\n        vec.append(&mut self.inner);\n        self.inner = vec;\n    }\n\n", "meta": 
{"hash_id": "a8b9930f1ca3f8faf3bd4c28ed85ebfd16f3016ee69ee96111ad911c3e3df105"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 6, "content": " /// Check if all cells in the row are empty.\n #[inline]\n pub fn is_clear(&self) -> bool\n where\n T: GridCell,\n {\n self.inner.iter().all(GridCell::is_empty)\n }\n\n #[inline]\n pub fn front_split_off(&mut self, at: usize) -> Vec {\n self.occ = self.occ.saturating_sub(at);\n\n let mut split = self.inner.split_off(at);\n std::mem::swap(&mut split, &mut self.inner);\n split\n }\n}\n\n", "meta": {"hash_id": "6d155c5814866de9be6524f2d89b58339c49f1a8232df5bf8454a4dfa2a1763e"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 7, "content": "impl<'a, T> IntoIterator for &'a Row {\n type IntoIter = slice::Iter<'a, T>;\n type Item = &'a T;\n\n #[inline]\n fn into_iter(self) -> slice::Iter<'a, T> {\n self.inner.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Row {\n type IntoIter = slice::IterMut<'a, T>;\n type Item = &'a mut T;\n\n #[inline]\n fn into_iter(self) -> slice::IterMut<'a, T> {\n self.occ = self.len();\n self.inner.iter_mut()\n }\n}\n\n", "meta": {"hash_id": "3b7ba27682b553036b98a0227b23eda92634709f228c1940728706237e8989f4"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 8, "content": "impl Index for Row {\n type Output = T;\n\n #[inline]\n fn index(&self, index: Column) -> &T {\n &self.inner[index.0]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, index: Column) -> &mut T {\n self.occ = max(self.occ, *index + 1);\n &mut self.inner[index.0]\n }\n}\n\nimpl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: Range) -> &[T] {\n &self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: Range) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\n", "meta": {"hash_id": "16f757e7a30a8c31e7df98fc7726841e28992d820bc7876df52cce0e70135585"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 9, "content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeTo) -> &[T] {\n &self.inner[..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeTo) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[..(index.end.0)]\n }\n}\n\n", "meta": {"hash_id": "530a12f133814517ef967d682ccaedc7273df2d8af6e4fc55fde884568d28e1f"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 10, "content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeFrom) -> &[T] {\n &self.inner[(index.start.0)..]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeFrom) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[(index.start.0)..]\n }\n}\n\nimpl Index for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, _: RangeFull) -> &[T] {\n &self.inner[..]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, _: RangeFull) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[..]\n }\n}\n\n", "meta": {"hash_id": "157220b168d8906bc9d112bce7cf86c44a7888a1a7f5729e15058be216b7c911"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 11, "content": "impl Index> for Row {\n 
type Output = [T];\n\n    #[inline]\n    fn index(&self, index: RangeToInclusive<Column>) -> &[T] {\n        &self.inner[..=(index.end.0)]\n    }\n}\n\nimpl<T> IndexMut<RangeToInclusive<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: RangeToInclusive<Column>) -> &mut [T] {\n        self.occ = max(self.occ, *index.end + 1);\n        &mut self.inner[..=(index.end.0)]\n    }\n}\n", "meta": {"hash_id": "4bf7ead5d687d2b9f354eff4d9ba83658d5e68f3179172f49c2e028d5520a1da"}}]}], "golden_chunks": [{"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 1, "content": "    /// Maximum number of occupied entries.\n    ///\n    /// This is the upper bound on the number of elements in the row, which have been modified\n    /// since the last reset. All cells after this point are guaranteed to be equal.\n    pub(crate) occ: usize,\n}\n\nimpl<T: PartialEq> PartialEq for Row<T> {\n    fn eq(&self, other: &Self) -> bool {\n        self.inner == other.inner\n    }\n}\n\nimpl<T: Clone + Default> Row<T> {\n    /// Create a new terminal row.\n    ///\n    /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.\n    pub fn new(columns: usize) -> Row<T> {\n        debug_assert!(columns >= 1);\n\n", "meta": {"hash_id": "693436a512db494bf3f09eb22c35187a110c59469888afdd32c50e4410f75922"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/grid/row.rs", "repo_name": "alacritty/alacritty", "num_chunks": 12, "source": "codebases/jsonl"}}
+{"query": "What does the `grow` method do in the `Row` struct?", "answer": "The `grow` method is used to increase the number of columns in the row:\n```rust\npub fn grow(&mut self, columns: usize) {\n    if self.inner.len() >= columns {\n        return;\n    }\n    self.inner.resize_with(columns, T::default);\n}\n```\nIf the current number of columns is already greater than or equal to the specified `columns`, it does nothing. Otherwise, it resizes the `inner` vector to have `columns` number of elements, filling any new elements with the default value of type `T`.", "golden_doc_uuids": ["c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb"], "golden_chunk_uuids": [["c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", 3]], "golden_documents": [{"uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "content": "//! Defines the Row type which makes up lines in the grid.\n\nuse std::cmp::{max, min};\nuse std::ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo, RangeToInclusive};\nuse std::{ptr, slice};\n\n#[cfg(feature = \"serde\")]\nuse serde::{Deserialize, Serialize};\n\nuse crate::grid::GridCell;\nuse crate::index::Column;\nuse crate::term::cell::ResetDiscriminant;\n\n/// A row in the grid.\n#[derive(Default, Clone, Debug)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub struct Row<T> {\n    inner: Vec<T>,\n\n    /// Maximum number of occupied entries.\n    ///\n    /// This is the upper bound on the number of elements in the row, which have been modified\n    /// since the last reset. 
All cells after this point are guaranteed to be equal.\n    pub(crate) occ: usize,\n}\n\nimpl<T: PartialEq> PartialEq for Row<T> {\n    fn eq(&self, other: &Self) -> bool {\n        self.inner == other.inner\n    }\n}\n\nimpl<T: Clone + Default> Row<T> {\n    /// Create a new terminal row.\n    ///\n    /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.\n    pub fn new(columns: usize) -> Row<T> {\n        debug_assert!(columns >= 1);\n\n        let mut inner: Vec<T> = Vec::with_capacity(columns);\n\n        // This is a slightly optimized version of `std::vec::Vec::resize`.\n        unsafe {\n            let mut ptr = inner.as_mut_ptr();\n\n            for _ in 1..columns {\n                ptr::write(ptr, T::default());\n                ptr = ptr.offset(1);\n            }\n            ptr::write(ptr, T::default());\n\n            inner.set_len(columns);\n        }\n\n        Row { inner, occ: 0 }\n    }\n\n    /// Increase the number of columns in the row.\n    #[inline]\n    pub fn grow(&mut self, columns: usize) {\n        if self.inner.len() >= columns {\n            return;\n        }\n\n        self.inner.resize_with(columns, T::default);\n    }\n\n    /// Reduce the number of columns in the row.\n    ///\n    /// This will return all non-empty cells that were removed.\n    pub fn shrink(&mut self, columns: usize) -> Option<Vec<T>>\n    where\n        T: GridCell,\n    {\n        if self.inner.len() <= columns {\n            return None;\n        }\n\n        // Split off cells for a new row.\n        let mut new_row = self.inner.split_off(columns);\n        let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n        new_row.truncate(index);\n\n        self.occ = min(self.occ, columns);\n\n        if new_row.is_empty() {\n            None\n        } else {\n            Some(new_row)\n        }\n    }\n\n    /// Reset all cells in the row to the `template` cell.\n    #[inline]\n    pub fn reset<D>(&mut self, template: &T)\n    where\n        T: ResetDiscriminant<D> + GridCell,\n        D: PartialEq,\n    {\n        debug_assert!(!self.inner.is_empty());\n\n        // Mark all cells as dirty if template cell changed.\n        let len = self.inner.len();\n        if self.inner[len - 1].discriminant() != template.discriminant() {\n            self.occ = len;\n        }\n\n        // Reset every dirty cell in the row.\n        for item in &mut self.inner[0..self.occ] {\n            item.reset(template);\n        }\n\n        self.occ = 0;\n    }\n}\n\n#[allow(clippy::len_without_is_empty)]\nimpl<T> Row<T> {\n    #[inline]\n    pub fn from_vec(vec: Vec<T>, occ: usize) -> Row<T> {\n        Row { inner: vec, occ }\n    }\n\n    #[inline]\n    pub fn len(&self) -> usize {\n        self.inner.len()\n    }\n\n    #[inline]\n    pub fn last(&self) -> Option<&T> {\n        self.inner.last()\n    }\n\n    #[inline]\n    pub fn last_mut(&mut self) -> Option<&mut T> {\n        self.occ = self.inner.len();\n        self.inner.last_mut()\n    }\n\n    #[inline]\n    pub fn append(&mut self, vec: &mut Vec<T>)\n    where\n        T: GridCell,\n    {\n        self.occ += vec.len();\n        self.inner.append(vec);\n    }\n\n    #[inline]\n    pub fn append_front(&mut self, mut vec: Vec<T>) {\n        self.occ += vec.len();\n\n        vec.append(&mut self.inner);\n        self.inner = vec;\n    }\n\n    /// Check if all cells in the row are empty.\n    #[inline]\n    pub fn is_clear(&self) -> bool\n    where\n        T: GridCell,\n    {\n        self.inner.iter().all(GridCell::is_empty)\n    }\n\n    #[inline]\n    pub fn front_split_off(&mut self, at: usize) -> Vec<T> {\n        self.occ = self.occ.saturating_sub(at);\n\n        let mut split = self.inner.split_off(at);\n        std::mem::swap(&mut split, &mut self.inner);\n        split\n    }\n}\n\nimpl<'a, T> IntoIterator for &'a Row<T> {\n    type IntoIter = slice::Iter<'a, T>;\n    type Item = &'a T;\n\n    #[inline]\n    fn into_iter(self) -> slice::Iter<'a, T> {\n        self.inner.iter()\n    }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Row<T> {\n    type IntoIter = slice::IterMut<'a, T>;\n    type Item = &'a mut T;\n\n    #[inline]\n    fn into_iter(self) -> slice::IterMut<'a, T> {\n        self.occ = self.len();\n        self.inner.iter_mut()\n    }\n}\n\nimpl<T> 
Index<Column> for Row<T> {\n    type Output = T;\n\n    #[inline]\n    fn index(&self, index: Column) -> &T {\n        &self.inner[index.0]\n    }\n}\n\nimpl<T> IndexMut<Column> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: Column) -> &mut T {\n        self.occ = max(self.occ, *index + 1);\n        &mut self.inner[index.0]\n    }\n}\n\nimpl<T> Index<Range<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: Range<Column>) -> &[T] {\n        &self.inner[(index.start.0)..(index.end.0)]\n    }\n}\n\nimpl<T> IndexMut<Range<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: Range<Column>) -> &mut [T] {\n        self.occ = max(self.occ, *index.end);\n        &mut self.inner[(index.start.0)..(index.end.0)]\n    }\n}\n\nimpl<T> Index<RangeTo<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: RangeTo<Column>) -> &[T] {\n        &self.inner[..(index.end.0)]\n    }\n}\n\nimpl<T> IndexMut<RangeTo<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: RangeTo<Column>) -> &mut [T] {\n        self.occ = max(self.occ, *index.end);\n        &mut self.inner[..(index.end.0)]\n    }\n}\n\nimpl<T> Index<RangeFrom<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: RangeFrom<Column>) -> &[T] {\n        &self.inner[(index.start.0)..]\n    }\n}\n\nimpl<T> IndexMut<RangeFrom<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: RangeFrom<Column>) -> &mut [T] {\n        self.occ = self.len();\n        &mut self.inner[(index.start.0)..]\n    }\n}\n\nimpl<T> Index<RangeFull> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, _: RangeFull) -> &[T] {\n        &self.inner[..]\n    }\n}\n\nimpl<T> IndexMut<RangeFull> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, _: RangeFull) -> &mut [T] {\n        self.occ = self.len();\n        &mut self.inner[..]\n    }\n}\n\nimpl<T> Index<RangeToInclusive<Column>> for Row<T> {\n    type Output = [T];\n\n    #[inline]\n    fn index(&self, index: RangeToInclusive<Column>) -> &[T] {\n        &self.inner[..=(index.end.0)]\n    }\n}\n\nimpl<T> IndexMut<RangeToInclusive<Column>> for Row<T> {\n    #[inline]\n    fn index_mut(&mut self, index: RangeToInclusive<Column>) -> &mut [T] {\n        self.occ = max(self.occ, *index.end + 1);\n        &mut self.inner[..=(index.end.0)]\n    }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/grid/row.rs", "repo_name": "alacritty/alacritty", "num_chunks": 12, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 0, "content": "//! Defines the Row type which makes up lines in the grid.\n\nuse std::cmp::{max, min};\nuse std::ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo, RangeToInclusive};\nuse std::{ptr, slice};\n\n#[cfg(feature = \"serde\")]\nuse serde::{Deserialize, Serialize};\n\nuse crate::grid::GridCell;\nuse crate::index::Column;\nuse crate::term::cell::ResetDiscriminant;\n\n/// A row in the grid.\n#[derive(Default, Clone, Debug)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub struct Row<T> {\n    inner: Vec<T>,\n\n", "meta": {"hash_id": "206acb6c42ed0bc1f1fc6e6ca07068f15670de8243ab12b918016fe6e441b937"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 1, "content": "    /// Maximum number of occupied entries.\n    ///\n    /// This is the upper bound on the number of elements in the row, which have been modified\n    /// since the last reset. 
All cells after this point are guaranteed to be equal.\n    pub(crate) occ: usize,\n}\n\nimpl<T: PartialEq> PartialEq for Row<T> {\n    fn eq(&self, other: &Self) -> bool {\n        self.inner == other.inner\n    }\n}\n\nimpl<T: Clone + Default> Row<T> {\n    /// Create a new terminal row.\n    ///\n    /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.\n    pub fn new(columns: usize) -> Row<T> {\n        debug_assert!(columns >= 1);\n\n", "meta": {"hash_id": "693436a512db494bf3f09eb22c35187a110c59469888afdd32c50e4410f75922"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 2, "content": "        let mut inner: Vec<T> = Vec::with_capacity(columns);\n\n        // This is a slightly optimized version of `std::vec::Vec::resize`.\n        unsafe {\n            let mut ptr = inner.as_mut_ptr();\n\n            for _ in 1..columns {\n                ptr::write(ptr, T::default());\n                ptr = ptr.offset(1);\n            }\n            ptr::write(ptr, T::default());\n\n            inner.set_len(columns);\n        }\n\n        Row { inner, occ: 0 }\n    }\n\n", "meta": {"hash_id": "b6f0a31209a99a487d56c6b214b1160fcdd9b7f4f5eabb54945e49c94f58a67c"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 3, "content": "    /// Increase the number of columns in the row.\n    #[inline]\n    pub fn grow(&mut self, columns: usize) {\n        if self.inner.len() >= columns {\n            return;\n        }\n\n        self.inner.resize_with(columns, T::default);\n    }\n\n    /// Reduce the number of columns in the row.\n    ///\n    /// This will return all non-empty cells that were removed.\n    pub fn shrink(&mut self, columns: usize) -> Option<Vec<T>>\n    where\n        T: GridCell,\n    {\n        if self.inner.len() <= columns {\n            return None;\n        }\n\n        // Split off cells for a new row.\n        let mut new_row = self.inner.split_off(columns);\n        let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n        new_row.truncate(index);\n\n        self.occ = min(self.occ, columns);\n\n        if new_row.is_empty() {\n            None\n        } else {\n            Some(new_row)\n        }\n    }\n\n", "meta": {"hash_id": "9d4dbb10ff4a8dc95f9f9a898123474633e98812f57b4f6b7f8225ea487f088b"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 4, "content": "    /// Reset all cells in the row to the `template` cell.\n    #[inline]\n    pub fn reset<D>(&mut self, template: &T)\n    where\n        T: ResetDiscriminant<D> + GridCell,\n        D: PartialEq,\n    {\n        debug_assert!(!self.inner.is_empty());\n\n        // Mark all cells as dirty if template cell changed.\n        let len = self.inner.len();\n        if self.inner[len - 1].discriminant() != template.discriminant() {\n            self.occ = len;\n        }\n\n        // Reset every dirty cell in the row.\n        for item in &mut self.inner[0..self.occ] {\n            item.reset(template);\n        }\n\n        self.occ = 0;\n    }\n}\n\n#[allow(clippy::len_without_is_empty)]\nimpl<T> Row<T> {\n    #[inline]\n    pub fn from_vec(vec: Vec<T>, occ: usize) -> Row<T> {\n        Row { inner: vec, occ }\n    }\n\n", "meta": {"hash_id": "ce3476267ff1e8bdce313ae074d59d86efe67ff72394dbd2933060bdad6c4421"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 5, "content": "    #[inline]\n    pub fn len(&self) -> usize {\n        self.inner.len()\n    }\n\n    #[inline]\n    pub fn last(&self) -> Option<&T> {\n        self.inner.last()\n    }\n\n    #[inline]\n    pub fn last_mut(&mut self) -> Option<&mut T> {\n        self.occ = self.inner.len();\n        self.inner.last_mut()\n    }\n\n    #[inline]\n    pub fn append(&mut self, vec: &mut Vec<T>)\n    where\n        T: GridCell,\n    {\n        self.occ += vec.len();\n        self.inner.append(vec);\n    }\n\n    #[inline]\n    pub fn append_front(&mut self, mut vec: Vec<T>) {\n        self.occ += vec.len();\n\n        vec.append(&mut self.inner);\n        self.inner = vec;\n    }\n\n", "meta": 
{"hash_id": "a8b9930f1ca3f8faf3bd4c28ed85ebfd16f3016ee69ee96111ad911c3e3df105"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 6, "content": " /// Check if all cells in the row are empty.\n #[inline]\n pub fn is_clear(&self) -> bool\n where\n T: GridCell,\n {\n self.inner.iter().all(GridCell::is_empty)\n }\n\n #[inline]\n pub fn front_split_off(&mut self, at: usize) -> Vec {\n self.occ = self.occ.saturating_sub(at);\n\n let mut split = self.inner.split_off(at);\n std::mem::swap(&mut split, &mut self.inner);\n split\n }\n}\n\n", "meta": {"hash_id": "6d155c5814866de9be6524f2d89b58339c49f1a8232df5bf8454a4dfa2a1763e"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 7, "content": "impl<'a, T> IntoIterator for &'a Row {\n type IntoIter = slice::Iter<'a, T>;\n type Item = &'a T;\n\n #[inline]\n fn into_iter(self) -> slice::Iter<'a, T> {\n self.inner.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Row {\n type IntoIter = slice::IterMut<'a, T>;\n type Item = &'a mut T;\n\n #[inline]\n fn into_iter(self) -> slice::IterMut<'a, T> {\n self.occ = self.len();\n self.inner.iter_mut()\n }\n}\n\n", "meta": {"hash_id": "3b7ba27682b553036b98a0227b23eda92634709f228c1940728706237e8989f4"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 8, "content": "impl Index for Row {\n type Output = T;\n\n #[inline]\n fn index(&self, index: Column) -> &T {\n &self.inner[index.0]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, index: Column) -> &mut T {\n self.occ = max(self.occ, *index + 1);\n &mut self.inner[index.0]\n }\n}\n\nimpl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: Range) -> &[T] {\n &self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: Range) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\n", "meta": {"hash_id": "16f757e7a30a8c31e7df98fc7726841e28992d820bc7876df52cce0e70135585"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 9, "content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeTo) -> &[T] {\n &self.inner[..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeTo) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[..(index.end.0)]\n }\n}\n\n", "meta": {"hash_id": "530a12f133814517ef967d682ccaedc7273df2d8af6e4fc55fde884568d28e1f"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 10, "content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeFrom) -> &[T] {\n &self.inner[(index.start.0)..]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeFrom) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[(index.start.0)..]\n }\n}\n\nimpl Index for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, _: RangeFull) -> &[T] {\n &self.inner[..]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, _: RangeFull) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[..]\n }\n}\n\n", "meta": {"hash_id": "157220b168d8906bc9d112bce7cf86c44a7888a1a7f5729e15058be216b7c911"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 11, "content": "impl Index> for Row {\n 
type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeToInclusive<Column>) -> &[T] {\n &self.inner[..=(index.end.0)]\n }\n}\n\nimpl<T> IndexMut<RangeToInclusive<Column>> for Row<T> {\n #[inline]\n fn index_mut(&mut self, index: RangeToInclusive<Column>) -> &mut [T] {\n self.occ = max(self.occ, *index.end + 1);\n &mut self.inner[..=(index.end.0)]\n }\n}\n", "meta": {"hash_id": "4bf7ead5d687d2b9f354eff4d9ba83658d5e68f3179172f49c2e028d5520a1da"}}]}], "golden_chunks": [{"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 3, "content": " /// Increase the number of columns in the row.\n #[inline]\n pub fn grow(&mut self, columns: usize) {\n if self.inner.len() >= columns {\n return;\n }\n\n self.inner.resize_with(columns, T::default);\n }\n\n /// Reduce the number of columns in the row.\n ///\n /// This will return all non-empty cells that were removed.\n pub fn shrink(&mut self, columns: usize) -> Option<Vec<T>>\n where\n T: GridCell,\n {\n if self.inner.len() <= columns {\n return None;\n }\n\n // Split off cells for a new row.\n let mut new_row = self.inner.split_off(columns);\n let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n new_row.truncate(index);\n\n self.occ = min(self.occ, columns);\n\n if new_row.is_empty() {\n None\n } else {\n Some(new_row)\n }\n }\n\n", "meta": {"hash_id": "9d4dbb10ff4a8dc95f9f9a898123474633e98812f57b4f6b7f8225ea487f088b"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/grid/row.rs", "repo_name": "alacritty/alacritty", "num_chunks": 12, "source": "codebases/jsonl"}} +{"query": "How does the `shrink` method work in the `Row` struct?", "answer": "The `shrink` method is used to reduce the number of columns in the row:\n```rust\npub fn shrink(&mut self, columns: usize) -> Option<Vec<T>>\nwhere\n T: GridCell,\n{\n if self.inner.len() <= columns {\n return None;\n }\n let mut new_row = self.inner.split_off(columns);\n let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n new_row.truncate(index);\n self.occ = min(self.occ, columns);\n if new_row.is_empty() {\n None\n } else {\n Some(new_row)\n }\n}\n```\nIt first checks if the current number of columns is already less than or equal to the specified `columns`. If so, it returns `None`. Otherwise, it splits the `inner` vector at the `columns` index, creating a new vector `new_row` with the removed elements. It then finds the index of the last non-empty cell in `new_row` and truncates it to remove any trailing empty cells. Finally, it updates `self.occ` to the minimum of its current value and `columns`, and returns `Some(new_row)` if `new_row` is not empty, or `None` otherwise.", "golden_doc_uuids": ["c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb"], "golden_chunk_uuids": [["c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", 3]], "golden_documents": [{"uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "content": "//! 
Defines the Row type which makes up lines in the grid.\n\nuse std::cmp::{max, min};\nuse std::ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo, RangeToInclusive};\nuse std::{ptr, slice};\n\n#[cfg(feature = \"serde\")]\nuse serde::{Deserialize, Serialize};\n\nuse crate::grid::GridCell;\nuse crate::index::Column;\nuse crate::term::cell::ResetDiscriminant;\n\n/// A row in the grid.\n#[derive(Default, Clone, Debug)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub struct Row {\n inner: Vec,\n\n /// Maximum number of occupied entries.\n ///\n /// This is the upper bound on the number of elements in the row, which have been modified\n /// since the last reset. All cells after this point are guaranteed to be equal.\n pub(crate) occ: usize,\n}\n\nimpl PartialEq for Row {\n fn eq(&self, other: &Self) -> bool {\n self.inner == other.inner\n }\n}\n\nimpl Row {\n /// Create a new terminal row.\n ///\n /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.\n pub fn new(columns: usize) -> Row {\n debug_assert!(columns >= 1);\n\n let mut inner: Vec = Vec::with_capacity(columns);\n\n // This is a slightly optimized version of `std::vec::Vec::resize`.\n unsafe {\n let mut ptr = inner.as_mut_ptr();\n\n for _ in 1..columns {\n ptr::write(ptr, T::default());\n ptr = ptr.offset(1);\n }\n ptr::write(ptr, T::default());\n\n inner.set_len(columns);\n }\n\n Row { inner, occ: 0 }\n }\n\n /// Increase the number of columns in the row.\n #[inline]\n pub fn grow(&mut self, columns: usize) {\n if self.inner.len() >= columns {\n return;\n }\n\n self.inner.resize_with(columns, T::default);\n }\n\n /// Reduce the number of columns in the row.\n ///\n /// This will return all non-empty cells that were removed.\n pub fn shrink(&mut self, columns: usize) -> Option>\n where\n T: GridCell,\n {\n if self.inner.len() <= columns {\n return None;\n }\n\n // Split off cells for a new row.\n let mut new_row = self.inner.split_off(columns);\n let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n new_row.truncate(index);\n\n self.occ = min(self.occ, columns);\n\n if new_row.is_empty() {\n None\n } else {\n Some(new_row)\n }\n }\n\n /// Reset all cells in the row to the `template` cell.\n #[inline]\n pub fn reset(&mut self, template: &T)\n where\n T: ResetDiscriminant + GridCell,\n D: PartialEq,\n {\n debug_assert!(!self.inner.is_empty());\n\n // Mark all cells as dirty if template cell changed.\n let len = self.inner.len();\n if self.inner[len - 1].discriminant() != template.discriminant() {\n self.occ = len;\n }\n\n // Reset every dirty cell in the row.\n for item in &mut self.inner[0..self.occ] {\n item.reset(template);\n }\n\n self.occ = 0;\n }\n}\n\n#[allow(clippy::len_without_is_empty)]\nimpl Row {\n #[inline]\n pub fn from_vec(vec: Vec, occ: usize) -> Row {\n Row { inner: vec, occ }\n }\n\n #[inline]\n pub fn len(&self) -> usize {\n self.inner.len()\n }\n\n #[inline]\n pub fn last(&self) -> Option<&T> {\n self.inner.last()\n }\n\n #[inline]\n pub fn last_mut(&mut self) -> Option<&mut T> {\n self.occ = self.inner.len();\n self.inner.last_mut()\n }\n\n #[inline]\n pub fn append(&mut self, vec: &mut Vec)\n where\n T: GridCell,\n {\n self.occ += vec.len();\n self.inner.append(vec);\n }\n\n #[inline]\n pub fn append_front(&mut self, mut vec: Vec) {\n self.occ += vec.len();\n\n vec.append(&mut self.inner);\n self.inner = vec;\n }\n\n /// Check if all cells in the row are empty.\n #[inline]\n pub fn is_clear(&self) -> bool\n where\n T: GridCell,\n 
{\n self.inner.iter().all(GridCell::is_empty)\n }\n\n #[inline]\n pub fn front_split_off(&mut self, at: usize) -> Vec {\n self.occ = self.occ.saturating_sub(at);\n\n let mut split = self.inner.split_off(at);\n std::mem::swap(&mut split, &mut self.inner);\n split\n }\n}\n\nimpl<'a, T> IntoIterator for &'a Row {\n type IntoIter = slice::Iter<'a, T>;\n type Item = &'a T;\n\n #[inline]\n fn into_iter(self) -> slice::Iter<'a, T> {\n self.inner.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Row {\n type IntoIter = slice::IterMut<'a, T>;\n type Item = &'a mut T;\n\n #[inline]\n fn into_iter(self) -> slice::IterMut<'a, T> {\n self.occ = self.len();\n self.inner.iter_mut()\n }\n}\n\nimpl Index for Row {\n type Output = T;\n\n #[inline]\n fn index(&self, index: Column) -> &T {\n &self.inner[index.0]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, index: Column) -> &mut T {\n self.occ = max(self.occ, *index + 1);\n &mut self.inner[index.0]\n }\n}\n\nimpl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: Range) -> &[T] {\n &self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: Range) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\nimpl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeTo) -> &[T] {\n &self.inner[..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeTo) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[..(index.end.0)]\n }\n}\n\nimpl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeFrom) -> &[T] {\n &self.inner[(index.start.0)..]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeFrom) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[(index.start.0)..]\n }\n}\n\nimpl Index for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, _: RangeFull) -> &[T] {\n &self.inner[..]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, _: RangeFull) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[..]\n }\n}\n\nimpl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeToInclusive) -> &[T] {\n &self.inner[..=(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeToInclusive) -> &mut [T] {\n self.occ = max(self.occ, *index.end + 1);\n &mut self.inner[..=(index.end.0)]\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/grid/row.rs", "repo_name": "alacritty/alacritty", "num_chunks": 12, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 0, "content": "//! 
Defines the Row type which makes up lines in the grid.\n\nuse std::cmp::{max, min};\nuse std::ops::{Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo, RangeToInclusive};\nuse std::{ptr, slice};\n\n#[cfg(feature = \"serde\")]\nuse serde::{Deserialize, Serialize};\n\nuse crate::grid::GridCell;\nuse crate::index::Column;\nuse crate::term::cell::ResetDiscriminant;\n\n/// A row in the grid.\n#[derive(Default, Clone, Debug)]\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\npub struct Row {\n inner: Vec,\n\n", "meta": {"hash_id": "206acb6c42ed0bc1f1fc6e6ca07068f15670de8243ab12b918016fe6e441b937"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 1, "content": " /// Maximum number of occupied entries.\n ///\n /// This is the upper bound on the number of elements in the row, which have been modified\n /// since the last reset. All cells after this point are guaranteed to be equal.\n pub(crate) occ: usize,\n}\n\nimpl PartialEq for Row {\n fn eq(&self, other: &Self) -> bool {\n self.inner == other.inner\n }\n}\n\nimpl Row {\n /// Create a new terminal row.\n ///\n /// Ideally the `template` should be `Copy` in all performance sensitive scenarios.\n pub fn new(columns: usize) -> Row {\n debug_assert!(columns >= 1);\n\n", "meta": {"hash_id": "693436a512db494bf3f09eb22c35187a110c59469888afdd32c50e4410f75922"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 2, "content": " let mut inner: Vec = Vec::with_capacity(columns);\n\n // This is a slightly optimized version of `std::vec::Vec::resize`.\n unsafe {\n let mut ptr = inner.as_mut_ptr();\n\n for _ in 1..columns {\n ptr::write(ptr, T::default());\n ptr = ptr.offset(1);\n }\n ptr::write(ptr, T::default());\n\n inner.set_len(columns);\n }\n\n Row { inner, occ: 0 }\n }\n\n", "meta": {"hash_id": "b6f0a31209a99a487d56c6b214b1160fcdd9b7f4f5eabb54945e49c94f58a67c"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 3, "content": " /// Increase the number of columns in the row.\n #[inline]\n pub fn grow(&mut self, columns: usize) {\n if self.inner.len() >= columns {\n return;\n }\n\n self.inner.resize_with(columns, T::default);\n }\n\n /// Reduce the number of columns in the row.\n ///\n /// This will return all non-empty cells that were removed.\n pub fn shrink(&mut self, columns: usize) -> Option>\n where\n T: GridCell,\n {\n if self.inner.len() <= columns {\n return None;\n }\n\n // Split off cells for a new row.\n let mut new_row = self.inner.split_off(columns);\n let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n new_row.truncate(index);\n\n self.occ = min(self.occ, columns);\n\n if new_row.is_empty() {\n None\n } else {\n Some(new_row)\n }\n }\n\n", "meta": {"hash_id": "9d4dbb10ff4a8dc95f9f9a898123474633e98812f57b4f6b7f8225ea487f088b"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 4, "content": " /// Reset all cells in the row to the `template` cell.\n #[inline]\n pub fn reset(&mut self, template: &T)\n where\n T: ResetDiscriminant + GridCell,\n D: PartialEq,\n {\n debug_assert!(!self.inner.is_empty());\n\n // Mark all cells as dirty if template cell changed.\n let len = self.inner.len();\n if self.inner[len - 1].discriminant() != template.discriminant() {\n self.occ = len;\n }\n\n // Reset every dirty cell in the row.\n for item in &mut self.inner[0..self.occ] {\n item.reset(template);\n }\n\n self.occ = 0;\n 
}\n}\n\n#[allow(clippy::len_without_is_empty)]\nimpl Row {\n #[inline]\n pub fn from_vec(vec: Vec, occ: usize) -> Row {\n Row { inner: vec, occ }\n }\n\n", "meta": {"hash_id": "ce3476267ff1e8bdce313ae074d59d86efe67ff72394dbd2933060bdad6c4421"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 5, "content": " #[inline]\n pub fn len(&self) -> usize {\n self.inner.len()\n }\n\n #[inline]\n pub fn last(&self) -> Option<&T> {\n self.inner.last()\n }\n\n #[inline]\n pub fn last_mut(&mut self) -> Option<&mut T> {\n self.occ = self.inner.len();\n self.inner.last_mut()\n }\n\n #[inline]\n pub fn append(&mut self, vec: &mut Vec)\n where\n T: GridCell,\n {\n self.occ += vec.len();\n self.inner.append(vec);\n }\n\n #[inline]\n pub fn append_front(&mut self, mut vec: Vec) {\n self.occ += vec.len();\n\n vec.append(&mut self.inner);\n self.inner = vec;\n }\n\n", "meta": {"hash_id": "a8b9930f1ca3f8faf3bd4c28ed85ebfd16f3016ee69ee96111ad911c3e3df105"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 6, "content": " /// Check if all cells in the row are empty.\n #[inline]\n pub fn is_clear(&self) -> bool\n where\n T: GridCell,\n {\n self.inner.iter().all(GridCell::is_empty)\n }\n\n #[inline]\n pub fn front_split_off(&mut self, at: usize) -> Vec {\n self.occ = self.occ.saturating_sub(at);\n\n let mut split = self.inner.split_off(at);\n std::mem::swap(&mut split, &mut self.inner);\n split\n }\n}\n\n", "meta": {"hash_id": "6d155c5814866de9be6524f2d89b58339c49f1a8232df5bf8454a4dfa2a1763e"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 7, "content": "impl<'a, T> IntoIterator for &'a Row {\n type IntoIter = slice::Iter<'a, T>;\n type Item = &'a T;\n\n #[inline]\n fn into_iter(self) -> slice::Iter<'a, T> {\n self.inner.iter()\n }\n}\n\nimpl<'a, T> IntoIterator for &'a mut Row {\n type IntoIter = slice::IterMut<'a, T>;\n type Item = &'a mut T;\n\n #[inline]\n fn into_iter(self) -> slice::IterMut<'a, T> {\n self.occ = self.len();\n self.inner.iter_mut()\n }\n}\n\n", "meta": {"hash_id": "3b7ba27682b553036b98a0227b23eda92634709f228c1940728706237e8989f4"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 8, "content": "impl Index for Row {\n type Output = T;\n\n #[inline]\n fn index(&self, index: Column) -> &T {\n &self.inner[index.0]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, index: Column) -> &mut T {\n self.occ = max(self.occ, *index + 1);\n &mut self.inner[index.0]\n }\n}\n\nimpl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: Range) -> &[T] {\n &self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: Range) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[(index.start.0)..(index.end.0)]\n }\n}\n\n", "meta": {"hash_id": "16f757e7a30a8c31e7df98fc7726841e28992d820bc7876df52cce0e70135585"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 9, "content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeTo) -> &[T] {\n &self.inner[..(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeTo) -> &mut [T] {\n self.occ = max(self.occ, *index.end);\n &mut self.inner[..(index.end.0)]\n }\n}\n\n", "meta": {"hash_id": "530a12f133814517ef967d682ccaedc7273df2d8af6e4fc55fde884568d28e1f"}}, 
{"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 10, "content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeFrom) -> &[T] {\n &self.inner[(index.start.0)..]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeFrom) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[(index.start.0)..]\n }\n}\n\nimpl Index for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, _: RangeFull) -> &[T] {\n &self.inner[..]\n }\n}\n\nimpl IndexMut for Row {\n #[inline]\n fn index_mut(&mut self, _: RangeFull) -> &mut [T] {\n self.occ = self.len();\n &mut self.inner[..]\n }\n}\n\n", "meta": {"hash_id": "157220b168d8906bc9d112bce7cf86c44a7888a1a7f5729e15058be216b7c911"}}, {"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 11, "content": "impl Index> for Row {\n type Output = [T];\n\n #[inline]\n fn index(&self, index: RangeToInclusive) -> &[T] {\n &self.inner[..=(index.end.0)]\n }\n}\n\nimpl IndexMut> for Row {\n #[inline]\n fn index_mut(&mut self, index: RangeToInclusive) -> &mut [T] {\n self.occ = max(self.occ, *index.end + 1);\n &mut self.inner[..=(index.end.0)]\n }\n}\n", "meta": {"hash_id": "4bf7ead5d687d2b9f354eff4d9ba83658d5e68f3179172f49c2e028d5520a1da"}}]}], "golden_chunks": [{"doc_uuid": "c630bf795d027fb148a8fb73eaea06efbb510d7c32ae05b7d4c14fc6a9bbe4cb", "index": 3, "content": " /// Increase the number of columns in the row.\n #[inline]\n pub fn grow(&mut self, columns: usize) {\n if self.inner.len() >= columns {\n return;\n }\n\n self.inner.resize_with(columns, T::default);\n }\n\n /// Reduce the number of columns in the row.\n ///\n /// This will return all non-empty cells that were removed.\n pub fn shrink(&mut self, columns: usize) -> Option>\n where\n T: GridCell,\n {\n if self.inner.len() <= columns {\n return None;\n }\n\n // Split off cells for a new row.\n let mut new_row = self.inner.split_off(columns);\n let index = new_row.iter().rposition(|c| !c.is_empty()).map_or(0, |i| i + 1);\n new_row.truncate(index);\n\n self.occ = min(self.occ, columns);\n\n if new_row.is_empty() {\n None\n } else {\n Some(new_row)\n }\n }\n\n", "meta": {"hash_id": "9d4dbb10ff4a8dc95f9f9a898123474633e98812f57b4f6b7f8225ea487f088b"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty_terminal/src/grid/row.rs", "repo_name": "alacritty/alacritty", "num_chunks": 12, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the `Debug` struct?", "answer": "The `Debug` struct contains various debugging options for the Alacritty terminal emulator. 
It includes fields such as `log_level` for setting the logging level, `print_events` for enabling event printing, `persistent_logging` for keeping the log file after quitting, `render_timer` for showing the render timer, `highlight_damage` for highlighting damage information, `renderer` for specifying the renderer preference, and `prefer_egl` for using EGL as the display API if the platform allows it.", "golden_doc_uuids": ["3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293"], "golden_chunk_uuids": [["3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293", 0]], "golden_documents": [{"uuid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293", "content": "use log::LevelFilter;\n\nuse alacritty_config_derive::ConfigDeserialize;\n\n/// Debugging options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub struct Debug {\n pub log_level: LevelFilter,\n\n pub print_events: bool,\n\n /// Keep the log file after quitting.\n pub persistent_logging: bool,\n\n /// Should show render timer.\n pub render_timer: bool,\n\n /// Highlight damage information produced by alacritty.\n pub highlight_damage: bool,\n\n /// The renderer alacritty should be using.\n pub renderer: Option<RendererPreference>,\n\n /// Use EGL as display API if the current platform allows it.\n pub prefer_egl: bool,\n\n /// Record ref test.\n #[config(skip)]\n pub ref_test: bool,\n}\n\nimpl Default for Debug {\n fn default() -> Self {\n Self {\n log_level: LevelFilter::Warn,\n print_events: Default::default(),\n persistent_logging: Default::default(),\n render_timer: Default::default(),\n highlight_damage: Default::default(),\n ref_test: Default::default(),\n renderer: Default::default(),\n prefer_egl: Default::default(),\n }\n }\n}\n\n/// The renderer configuration options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub enum RendererPreference {\n /// OpenGL 3.3 renderer.\n Glsl3,\n\n /// GLES 2 renderer, with optional extensions like dual source blending.\n Gles2,\n\n /// Pure GLES 2 renderer.\n Gles2Pure,\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/debug.rs", "repo_name": "alacritty/alacritty", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293", "index": 0, "content": "use log::LevelFilter;\n\nuse alacritty_config_derive::ConfigDeserialize;\n\n/// Debugging options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub struct Debug {\n pub log_level: LevelFilter,\n\n pub print_events: bool,\n\n /// Keep the log file after quitting.\n pub persistent_logging: bool,\n\n /// Should show render timer.\n pub render_timer: bool,\n\n /// Highlight damage information produced by alacritty.\n pub highlight_damage: bool,\n\n /// The renderer alacritty should be using.\n pub renderer: Option<RendererPreference>,\n\n /// Use EGL as display API if the current platform allows it.\n pub prefer_egl: bool,\n\n /// Record ref test.\n #[config(skip)]\n pub ref_test: bool,\n}\n\n", "meta": {"hash_id": "0e0aa4f14c44a4e194e82b2b2a3e13653acf63af7b0ed2a40a92750754e90d85"}}, {"doc_uuid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293", "index": 1, "content": "impl Default for Debug {\n fn default() -> Self {\n Self {\n log_level: LevelFilter::Warn,\n print_events: Default::default(),\n persistent_logging: Default::default(),\n render_timer: Default::default(),\n highlight_damage: Default::default(),\n ref_test: 
Default::default(),\n renderer: Default::default(),\n prefer_egl: Default::default(),\n }\n }\n}\n\n/// The renderer configuration options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub enum RendererPreference {\n /// OpenGL 3.3 renderer.\n Glsl3,\n\n /// GLES 2 renderer, with optional extensions like dual source blending.\n Gles2,\n\n /// Pure GLES 2 renderer.\n Gles2Pure,\n}\n", "meta": {"hash_id": "3b029197fe74768740a4900b8d1b41a39fd561589877e35c78dff4f481154af0"}}]}], "golden_chunks": [{"doc_uuid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293", "index": 0, "content": "use log::LevelFilter;\n\nuse alacritty_config_derive::ConfigDeserialize;\n\n/// Debugging options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub struct Debug {\n pub log_level: LevelFilter,\n\n pub print_events: bool,\n\n /// Keep the log file after quitting.\n pub persistent_logging: bool,\n\n /// Should show render timer.\n pub render_timer: bool,\n\n /// Highlight damage information produced by alacritty.\n pub highlight_damage: bool,\n\n /// The renderer alacritty should be using.\n pub renderer: Option,\n\n /// Use EGL as display API if the current platform allows it.\n pub prefer_egl: bool,\n\n /// Record ref test.\n #[config(skip)]\n pub ref_test: bool,\n}\n\n", "meta": {"hash_id": "0e0aa4f14c44a4e194e82b2b2a3e13653acf63af7b0ed2a40a92750754e90d85"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/debug.rs", "repo_name": "alacritty/alacritty", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How can I specify the renderer preference in Alacritty?", "answer": "To specify the renderer preference in Alacritty, you can set the `renderer` field of the `Debug` struct to one of the variants of the `RendererPreference` enum. For example, to use the OpenGL 3.3 renderer, you can set it to `RendererPreference::Glsl3`. 
By default, the `renderer` field is set to `None` as shown in the default implementation of the `Debug` struct:\n\nimpl Default for Debug {\n fn default() -> Self {\n Self {\n // ...\n renderer: Default::default(),\n // ...\n }\n }\n}\n", "golden_doc_uuids": ["3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293"], "golden_chunk_uuids": [["3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293", 0]], "golden_documents": [{"uuid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293", "content": "use log::LevelFilter;\n\nuse alacritty_config_derive::ConfigDeserialize;\n\n/// Debugging options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub struct Debug {\n pub log_level: LevelFilter,\n\n pub print_events: bool,\n\n /// Keep the log file after quitting.\n pub persistent_logging: bool,\n\n /// Should show render timer.\n pub render_timer: bool,\n\n /// Highlight damage information produced by alacritty.\n pub highlight_damage: bool,\n\n /// The renderer alacritty should be using.\n pub renderer: Option,\n\n /// Use EGL as display API if the current platform allows it.\n pub prefer_egl: bool,\n\n /// Record ref test.\n #[config(skip)]\n pub ref_test: bool,\n}\n\nimpl Default for Debug {\n fn default() -> Self {\n Self {\n log_level: LevelFilter::Warn,\n print_events: Default::default(),\n persistent_logging: Default::default(),\n render_timer: Default::default(),\n highlight_damage: Default::default(),\n ref_test: Default::default(),\n renderer: Default::default(),\n prefer_egl: Default::default(),\n }\n }\n}\n\n/// The renderer configuration options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub enum RendererPreference {\n /// OpenGL 3.3 renderer.\n Glsl3,\n\n /// GLES 2 renderer, with optional extensions like dual source blending.\n Gles2,\n\n /// Pure GLES 2 renderer.\n Gles2Pure,\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/debug.rs", "repo_name": "alacritty/alacritty", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293", "index": 0, "content": "use log::LevelFilter;\n\nuse alacritty_config_derive::ConfigDeserialize;\n\n/// Debugging options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub struct Debug {\n pub log_level: LevelFilter,\n\n pub print_events: bool,\n\n /// Keep the log file after quitting.\n pub persistent_logging: bool,\n\n /// Should show render timer.\n pub render_timer: bool,\n\n /// Highlight damage information produced by alacritty.\n pub highlight_damage: bool,\n\n /// The renderer alacritty should be using.\n pub renderer: Option,\n\n /// Use EGL as display API if the current platform allows it.\n pub prefer_egl: bool,\n\n /// Record ref test.\n #[config(skip)]\n pub ref_test: bool,\n}\n\n", "meta": {"hash_id": "0e0aa4f14c44a4e194e82b2b2a3e13653acf63af7b0ed2a40a92750754e90d85"}}, {"doc_uuid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293", "index": 1, "content": "impl Default for Debug {\n fn default() -> Self {\n Self {\n log_level: LevelFilter::Warn,\n print_events: Default::default(),\n persistent_logging: Default::default(),\n render_timer: Default::default(),\n highlight_damage: Default::default(),\n ref_test: Default::default(),\n renderer: Default::default(),\n prefer_egl: Default::default(),\n }\n }\n}\n\n/// The renderer configuration 
options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub enum RendererPreference {\n /// OpenGL 3.3 renderer.\n Glsl3,\n\n /// GLES 2 renderer, with optional extensions like dual source blending.\n Gles2,\n\n /// Pure GLES 2 renderer.\n Gles2Pure,\n}\n", "meta": {"hash_id": "3b029197fe74768740a4900b8d1b41a39fd561589877e35c78dff4f481154af0"}}]}], "golden_chunks": [{"doc_uuid": "3b71dd9257861671e20a407ef83e6fe4eaab1996ebaf57369ea40439e0deb293", "index": 0, "content": "use log::LevelFilter;\n\nuse alacritty_config_derive::ConfigDeserialize;\n\n/// Debugging options.\n#[derive(ConfigDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub struct Debug {\n pub log_level: LevelFilter,\n\n pub print_events: bool,\n\n /// Keep the log file after quitting.\n pub persistent_logging: bool,\n\n /// Should show render timer.\n pub render_timer: bool,\n\n /// Highlight damage information produced by alacritty.\n pub highlight_damage: bool,\n\n /// The renderer alacritty should be using.\n pub renderer: Option,\n\n /// Use EGL as display API if the current platform allows it.\n pub prefer_egl: bool,\n\n /// Record ref test.\n #[config(skip)]\n pub ref_test: bool,\n}\n\n", "meta": {"hash_id": "0e0aa4f14c44a4e194e82b2b2a3e13653acf63af7b0ed2a40a92750754e90d85"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/config/debug.rs", "repo_name": "alacritty/alacritty", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the Display struct?", "answer": "The Display struct represents the display subsystem of the application. As stated in the comment at the top of the file: \"The display subsystem including window management, font rasterization, and GPU drawing.\" It wraps a window, font rasterizer, and GPU renderer.", "golden_doc_uuids": ["96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b"], "golden_chunk_uuids": [["96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", 14]], "golden_documents": [{"uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "content": "//! The display subsystem including window management, font rasterization, and\n//! 
GPU drawing.\n\nuse std::cmp;\nuse std::fmt::{self, Formatter};\nuse std::mem::{self, ManuallyDrop};\nuse std::num::NonZeroU32;\nuse std::ops::{Deref, DerefMut};\nuse std::time::{Duration, Instant};\n\nuse glutin::context::{NotCurrentContext, PossiblyCurrentContext};\nuse glutin::prelude::*;\nuse glutin::surface::{Surface, SwapInterval, WindowSurface};\n\nuse log::{debug, info};\nuse parking_lot::MutexGuard;\nuse raw_window_handle::RawWindowHandle;\nuse serde::{Deserialize, Serialize};\nuse winit::dpi::PhysicalSize;\nuse winit::keyboard::ModifiersState;\nuse winit::window::CursorIcon;\n\nuse crossfont::{Rasterize, Rasterizer, Size as FontSize};\nuse unicode_width::UnicodeWidthChar;\n\nuse alacritty_terminal::event::{EventListener, OnResize, WindowSize};\nuse alacritty_terminal::grid::Dimensions as TermDimensions;\nuse alacritty_terminal::index::{Column, Direction, Line, Point};\nuse alacritty_terminal::selection::Selection;\nuse alacritty_terminal::term::cell::Flags;\nuse alacritty_terminal::term::{\n self, point_to_viewport, LineDamageBounds, Term, TermDamage, TermMode, MIN_COLUMNS,\n MIN_SCREEN_LINES,\n};\nuse alacritty_terminal::vte::ansi::{CursorShape, NamedColor};\n\nuse crate::config::font::Font;\nuse crate::config::window::Dimensions;\n#[cfg(not(windows))]\nuse crate::config::window::StartupMode;\nuse crate::config::UiConfig;\nuse crate::display::bell::VisualBell;\nuse crate::display::color::{List, Rgb};\nuse crate::display::content::{RenderableContent, RenderableCursor};\nuse crate::display::cursor::IntoRects;\nuse crate::display::damage::{damage_y_to_viewport_y, DamageTracker};\nuse crate::display::hint::{HintMatch, HintState};\nuse crate::display::meter::Meter;\nuse crate::display::window::Window;\nuse crate::event::{Event, EventType, Mouse, SearchState};\nuse crate::message_bar::{MessageBuffer, MessageType};\nuse crate::renderer::rects::{RenderLine, RenderLines, RenderRect};\nuse crate::renderer::{self, GlyphCache, Renderer};\nuse crate::scheduler::{Scheduler, TimerId, Topic};\nuse crate::string::{ShortenDirection, StrShortener};\n\npub mod color;\npub mod content;\npub mod cursor;\npub mod hint;\npub mod window;\n\nmod bell;\nmod damage;\nmod meter;\n\n/// Label for the forward terminal search bar.\nconst FORWARD_SEARCH_LABEL: &str = \"Search: \";\n\n/// Label for the backward terminal search bar.\nconst BACKWARD_SEARCH_LABEL: &str = \"Backward Search: \";\n\n/// The character used to shorten the visible text like uri preview or search regex.\nconst SHORTENER: char = '…';\n\n/// Color which is used to highlight damaged rects when debugging.\nconst DAMAGE_RECT_COLOR: Rgb = Rgb::new(255, 0, 255);\n\n#[derive(Debug)]\npub enum Error {\n /// Error with window management.\n Window(window::Error),\n\n /// Error dealing with fonts.\n Font(crossfont::Error),\n\n /// Error in renderer.\n Render(renderer::Error),\n\n /// Error during context operations.\n Context(glutin::error::Error),\n}\n\nimpl std::error::Error for Error {\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n match self {\n Error::Window(err) => err.source(),\n Error::Font(err) => err.source(),\n Error::Render(err) => err.source(),\n Error::Context(err) => err.source(),\n }\n }\n}\n\nimpl fmt::Display for Error {\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n match self {\n Error::Window(err) => err.fmt(f),\n Error::Font(err) => err.fmt(f),\n Error::Render(err) => err.fmt(f),\n Error::Context(err) => err.fmt(f),\n }\n }\n}\n\nimpl From for Error {\n fn from(val: window::Error) -> Self {\n 
Error::Window(val)\n }\n}\n\nimpl From for Error {\n fn from(val: crossfont::Error) -> Self {\n Error::Font(val)\n }\n}\n\nimpl From for Error {\n fn from(val: renderer::Error) -> Self {\n Error::Render(val)\n }\n}\n\nimpl From for Error {\n fn from(val: glutin::error::Error) -> Self {\n Error::Context(val)\n }\n}\n\n/// Terminal size info.\n#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]\npub struct SizeInfo {\n /// Terminal window width.\n width: T,\n\n /// Terminal window height.\n height: T,\n\n /// Width of individual cell.\n cell_width: T,\n\n /// Height of individual cell.\n cell_height: T,\n\n /// Horizontal window padding.\n padding_x: T,\n\n /// Vertical window padding.\n padding_y: T,\n\n /// Number of lines in the viewport.\n screen_lines: usize,\n\n /// Number of columns in the viewport.\n columns: usize,\n}\n\nimpl From> for SizeInfo {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n width: size_info.width as u32,\n height: size_info.height as u32,\n cell_width: size_info.cell_width as u32,\n cell_height: size_info.cell_height as u32,\n padding_x: size_info.padding_x as u32,\n padding_y: size_info.padding_y as u32,\n screen_lines: size_info.screen_lines,\n columns: size_info.screen_lines,\n }\n }\n}\n\nimpl From> for WindowSize {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n num_cols: size_info.columns() as u16,\n num_lines: size_info.screen_lines() as u16,\n cell_width: size_info.cell_width() as u16,\n cell_height: size_info.cell_height() as u16,\n }\n }\n}\n\nimpl SizeInfo {\n #[inline]\n pub fn width(&self) -> T {\n self.width\n }\n\n #[inline]\n pub fn height(&self) -> T {\n self.height\n }\n\n #[inline]\n pub fn cell_width(&self) -> T {\n self.cell_width\n }\n\n #[inline]\n pub fn cell_height(&self) -> T {\n self.cell_height\n }\n\n #[inline]\n pub fn padding_x(&self) -> T {\n self.padding_x\n }\n\n #[inline]\n pub fn padding_y(&self) -> T {\n self.padding_y\n }\n}\n\nimpl SizeInfo {\n #[allow(clippy::too_many_arguments)]\n pub fn new(\n width: f32,\n height: f32,\n cell_width: f32,\n cell_height: f32,\n mut padding_x: f32,\n mut padding_y: f32,\n dynamic_padding: bool,\n ) -> SizeInfo {\n if dynamic_padding {\n padding_x = Self::dynamic_padding(padding_x.floor(), width, cell_width);\n padding_y = Self::dynamic_padding(padding_y.floor(), height, cell_height);\n }\n\n let lines = (height - 2. * padding_y) / cell_height;\n let screen_lines = cmp::max(lines as usize, MIN_SCREEN_LINES);\n\n let columns = (width - 2. * padding_x) / cell_width;\n let columns = cmp::max(columns as usize, MIN_COLUMNS);\n\n SizeInfo {\n width,\n height,\n cell_width,\n cell_height,\n padding_x: padding_x.floor(),\n padding_y: padding_y.floor(),\n screen_lines,\n columns,\n }\n }\n\n #[inline]\n pub fn reserve_lines(&mut self, count: usize) {\n self.screen_lines = cmp::max(self.screen_lines.saturating_sub(count), MIN_SCREEN_LINES);\n }\n\n /// Check if coordinates are inside the terminal grid.\n ///\n /// The padding, message bar or search are not counted as part of the grid.\n #[inline]\n pub fn contains_point(&self, x: usize, y: usize) -> bool {\n x <= (self.padding_x + self.columns as f32 * self.cell_width) as usize\n && x > self.padding_x as usize\n && y <= (self.padding_y + self.screen_lines as f32 * self.cell_height) as usize\n && y > self.padding_y as usize\n }\n\n /// Calculate padding to spread it evenly around the terminal content.\n #[inline]\n fn dynamic_padding(padding: f32, dimension: f32, cell_dimension: f32) -> f32 {\n padding + ((dimension - 2. 
* padding) % cell_dimension) / 2.\n }\n}\n\nimpl TermDimensions for SizeInfo {\n #[inline]\n fn columns(&self) -> usize {\n self.columns\n }\n\n #[inline]\n fn screen_lines(&self) -> usize {\n self.screen_lines\n }\n\n #[inline]\n fn total_lines(&self) -> usize {\n self.screen_lines()\n }\n}\n\n#[derive(Default, Clone, Debug, PartialEq, Eq)]\npub struct DisplayUpdate {\n pub dirty: bool,\n\n dimensions: Option>,\n cursor_dirty: bool,\n font: Option,\n}\n\nimpl DisplayUpdate {\n pub fn dimensions(&self) -> Option> {\n self.dimensions\n }\n\n pub fn font(&self) -> Option<&Font> {\n self.font.as_ref()\n }\n\n pub fn cursor_dirty(&self) -> bool {\n self.cursor_dirty\n }\n\n pub fn set_dimensions(&mut self, dimensions: PhysicalSize) {\n self.dimensions = Some(dimensions);\n self.dirty = true;\n }\n\n pub fn set_font(&mut self, font: Font) {\n self.font = Some(font);\n self.dirty = true;\n }\n\n pub fn set_cursor_dirty(&mut self) {\n self.cursor_dirty = true;\n self.dirty = true;\n }\n}\n\n/// The display wraps a window, font rasterizer, and GPU renderer.\npub struct Display {\n pub window: Window,\n\n pub size_info: SizeInfo,\n\n /// Hint highlighted by the mouse.\n pub highlighted_hint: Option,\n\n /// Hint highlighted by the vi mode cursor.\n pub vi_highlighted_hint: Option,\n\n pub raw_window_handle: RawWindowHandle,\n\n /// UI cursor visibility for blinking.\n pub cursor_hidden: bool,\n\n pub visual_bell: VisualBell,\n\n /// Mapped RGB values for each terminal color.\n pub colors: List,\n\n /// State of the keyboard hints.\n pub hint_state: HintState,\n\n /// Unprocessed display updates.\n pub pending_update: DisplayUpdate,\n\n /// The renderer update that takes place only once before the actual rendering.\n pub pending_renderer_update: Option,\n\n /// The ime on the given display.\n pub ime: Ime,\n\n /// The state of the timer for frame scheduling.\n pub frame_timer: FrameTimer,\n\n /// Damage tracker for the given display.\n pub damage_tracker: DamageTracker,\n\n /// Font size used by the window.\n pub font_size: FontSize,\n\n // Mouse point position when highlighting hints.\n hint_mouse_point: Option,\n\n renderer: ManuallyDrop,\n\n surface: ManuallyDrop>,\n\n context: ManuallyDrop>,\n\n glyph_cache: GlyphCache,\n meter: Meter,\n}\n\nimpl Display {\n pub fn new(\n window: Window,\n gl_context: NotCurrentContext,\n config: &UiConfig,\n _tabbed: bool,\n ) -> Result {\n let raw_window_handle = window.raw_window_handle();\n\n let scale_factor = window.scale_factor as f32;\n let rasterizer = Rasterizer::new()?;\n\n let font_size = config.font.size().scale(scale_factor);\n debug!(\"Loading \\\"{}\\\" font\", &config.font.normal().family);\n let font = config.font.clone().with_size(font_size);\n let mut glyph_cache = GlyphCache::new(rasterizer, &font)?;\n\n let metrics = glyph_cache.font_metrics();\n let (cell_width, cell_height) = compute_cell_size(config, &metrics);\n\n // Resize the window to account for the user configured size.\n if let Some(dimensions) = config.window.dimensions() {\n let size = window_size(config, dimensions, cell_width, cell_height, scale_factor);\n window.request_inner_size(size);\n }\n\n // Create the GL surface to draw into.\n let surface = renderer::platform::create_gl_surface(\n &gl_context,\n window.inner_size(),\n window.raw_window_handle(),\n )?;\n\n // Make the context current.\n let context = gl_context.make_current(&surface)?;\n\n // Create renderer.\n let mut renderer = Renderer::new(&context, config.debug.renderer)?;\n\n // Load font common glyphs to 
accelerate rendering.\n debug!(\"Filling glyph cache with common glyphs\");\n renderer.with_loader(|mut api| {\n glyph_cache.reset_glyph_cache(&mut api);\n });\n\n let padding = config.window.padding(window.scale_factor as f32);\n let viewport_size = window.inner_size();\n\n // Create new size with at least one column and row.\n let size_info = SizeInfo::new(\n viewport_size.width as f32,\n viewport_size.height as f32,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding && config.window.dimensions().is_none(),\n );\n\n info!(\"Cell size: {} x {}\", cell_width, cell_height);\n info!(\"Padding: {} x {}\", size_info.padding_x(), size_info.padding_y());\n info!(\"Width: {}, Height: {}\", size_info.width(), size_info.height());\n\n // Update OpenGL projection.\n renderer.resize(&size_info);\n\n // Clear screen.\n let background_color = config.colors.primary.background;\n renderer.clear(background_color, config.window_opacity());\n\n // Disable shadows for transparent windows on macOS.\n #[cfg(target_os = \"macos\")]\n window.set_has_shadow(config.window_opacity() >= 1.0);\n\n let is_wayland = matches!(raw_window_handle, RawWindowHandle::Wayland(_));\n\n // On Wayland we can safely ignore this call, since the window isn't visible until you\n // actually draw something into it and commit those changes.\n if !is_wayland {\n surface.swap_buffers(&context).expect(\"failed to swap buffers.\");\n renderer.finish();\n }\n\n // Set resize increments for the newly created window.\n if config.window.resize_increments {\n window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n window.set_visible(true);\n\n #[allow(clippy::single_match)]\n #[cfg(not(windows))]\n if !_tabbed {\n match config.window.startup_mode {\n #[cfg(target_os = \"macos\")]\n StartupMode::SimpleFullscreen => window.set_simple_fullscreen(true),\n StartupMode::Maximized if !is_wayland => window.set_maximized(true),\n _ => (),\n }\n }\n\n let hint_state = HintState::new(config.hints.alphabet());\n\n let mut damage_tracker = DamageTracker::new(size_info.screen_lines(), size_info.columns());\n damage_tracker.debug = config.debug.highlight_damage;\n\n // Disable vsync.\n if let Err(err) = surface.set_swap_interval(&context, SwapInterval::DontWait) {\n info!(\"Failed to disable vsync: {}\", err);\n }\n\n Ok(Self {\n context: ManuallyDrop::new(Replaceable::new(context)),\n visual_bell: VisualBell::from(&config.bell),\n renderer: ManuallyDrop::new(renderer),\n surface: ManuallyDrop::new(surface),\n colors: List::from(&config.colors),\n frame_timer: FrameTimer::new(),\n raw_window_handle,\n damage_tracker,\n glyph_cache,\n hint_state,\n size_info,\n font_size,\n window,\n pending_renderer_update: Default::default(),\n vi_highlighted_hint: Default::default(),\n highlighted_hint: Default::default(),\n hint_mouse_point: Default::default(),\n pending_update: Default::default(),\n cursor_hidden: Default::default(),\n meter: Default::default(),\n ime: Default::default(),\n })\n }\n\n #[inline]\n pub fn gl_context(&self) -> &PossiblyCurrentContext {\n self.context.get()\n }\n\n pub fn make_not_current(&mut self) {\n if self.context.get().is_current() {\n self.context.replace_with(|context| {\n context\n .make_not_current()\n .expect(\"failed to disable context\")\n .treat_as_possibly_current()\n });\n }\n }\n\n pub fn make_current(&self) {\n if !self.context.get().is_current() {\n self.context.make_current(&self.surface).expect(\"failed to make context current\")\n }\n }\n\n fn 
swap_buffers(&self) {\n #[allow(clippy::single_match)]\n let res = match (self.surface.deref(), &self.context.get()) {\n #[cfg(not(any(target_os = \"macos\", windows)))]\n (Surface::Egl(surface), PossiblyCurrentContext::Egl(context))\n if matches!(self.raw_window_handle, RawWindowHandle::Wayland(_))\n && !self.damage_tracker.debug =>\n {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n surface.swap_buffers_with_damage(context, &damage)\n },\n (surface, context) => surface.swap_buffers(context),\n };\n if let Err(err) = res {\n debug!(\"error calling swap_buffers: {}\", err);\n }\n }\n\n /// Update font size and cell dimensions.\n ///\n /// This will return a tuple of the cell width and height.\n fn update_font_size(\n glyph_cache: &mut GlyphCache,\n config: &UiConfig,\n font: &Font,\n ) -> (f32, f32) {\n let _ = glyph_cache.update_font_size(font);\n\n // Compute new cell sizes.\n compute_cell_size(config, &glyph_cache.font_metrics())\n }\n\n /// Reset glyph cache.\n fn reset_glyph_cache(&mut self) {\n let cache = &mut self.glyph_cache;\n self.renderer.with_loader(|mut api| {\n cache.reset_glyph_cache(&mut api);\n });\n }\n\n // XXX: this function must not call to any `OpenGL` related tasks. Renderer updates are\n // performed in [`Self::process_renderer_update`] right before drawing.\n //\n /// Process update events.\n pub fn handle_update(\n &mut self,\n terminal: &mut Term,\n pty_resize_handle: &mut dyn OnResize,\n message_buffer: &MessageBuffer,\n search_state: &mut SearchState,\n config: &UiConfig,\n ) where\n T: EventListener,\n {\n let pending_update = mem::take(&mut self.pending_update);\n\n let (mut cell_width, mut cell_height) =\n (self.size_info.cell_width(), self.size_info.cell_height());\n\n if pending_update.font().is_some() || pending_update.cursor_dirty() {\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.clear_font_cache = true\n }\n\n // Update font size and cell dimensions.\n if let Some(font) = pending_update.font() {\n let cell_dimensions = Self::update_font_size(&mut self.glyph_cache, config, font);\n cell_width = cell_dimensions.0;\n cell_height = cell_dimensions.1;\n\n info!(\"Cell size: {} x {}\", cell_width, cell_height);\n\n // Mark entire terminal as damaged since glyph size could change without cell size\n // changes.\n self.damage_tracker.frame().mark_fully_damaged();\n }\n\n let (mut width, mut height) = (self.size_info.width(), self.size_info.height());\n if let Some(dimensions) = pending_update.dimensions() {\n width = dimensions.width as f32;\n height = dimensions.height as f32;\n }\n\n let padding = config.window.padding(self.window.scale_factor as f32);\n\n let mut new_size = SizeInfo::new(\n width,\n height,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding,\n );\n\n // Update number of column/lines in the viewport.\n let search_active = search_state.history_index.is_some();\n let message_bar_lines = message_buffer.message().map_or(0, |m| m.text(&new_size).len());\n let search_lines = usize::from(search_active);\n new_size.reserve_lines(message_bar_lines + search_lines);\n\n // Update resize increments.\n if config.window.resize_increments {\n self.window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n // Resize when terminal when its dimensions have changed.\n if self.size_info.screen_lines() != new_size.screen_lines\n || self.size_info.columns() != new_size.columns()\n {\n // Resize PTY.\n 
pty_resize_handle.on_resize(new_size.into());\n\n // Resize terminal.\n terminal.resize(new_size);\n\n // Resize damage tracking.\n self.damage_tracker.resize(new_size.screen_lines(), new_size.columns());\n }\n\n // Check if dimensions have changed.\n if new_size != self.size_info {\n // Queue renderer update.\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.resize = true;\n\n // Clear focused search match.\n search_state.clear_focused_match();\n }\n self.size_info = new_size;\n }\n\n // NOTE: Renderer updates are split off, since platforms like Wayland require resize and other\n // OpenGL operations to be performed right before rendering. Otherwise they could lock the\n // back buffer and render with the previous state. This also solves flickering during resizes.\n //\n /// Update the state of the renderer.\n pub fn process_renderer_update(&mut self) {\n let renderer_update = match self.pending_renderer_update.take() {\n Some(renderer_update) => renderer_update,\n _ => return,\n };\n\n // Resize renderer.\n if renderer_update.resize {\n let width = NonZeroU32::new(self.size_info.width() as u32).unwrap();\n let height = NonZeroU32::new(self.size_info.height() as u32).unwrap();\n self.surface.resize(&self.context, width, height);\n }\n\n // Ensure we're modifying the correct OpenGL context.\n self.make_current();\n\n if renderer_update.clear_font_cache {\n self.reset_glyph_cache();\n }\n\n self.renderer.resize(&self.size_info);\n\n info!(\"Padding: {} x {}\", self.size_info.padding_x(), self.size_info.padding_y());\n info!(\"Width: {}, Height: {}\", self.size_info.width(), self.size_info.height());\n }\n\n /// Draw the screen.\n ///\n /// A reference to Term whose state is being drawn must be provided.\n ///\n /// This call may block if vsync is enabled.\n pub fn draw(\n &mut self,\n mut terminal: MutexGuard<'_, Term>,\n scheduler: &mut Scheduler,\n message_buffer: &MessageBuffer,\n config: &UiConfig,\n search_state: &mut SearchState,\n ) {\n // Collect renderable content before the terminal is dropped.\n let mut content = RenderableContent::new(config, self, &terminal, search_state);\n let mut grid_cells = Vec::new();\n for cell in &mut content {\n grid_cells.push(cell);\n }\n let selection_range = content.selection_range();\n let foreground_color = content.color(NamedColor::Foreground as usize);\n let background_color = content.color(NamedColor::Background as usize);\n let display_offset = content.display_offset();\n let cursor = content.cursor();\n\n let cursor_point = terminal.grid().cursor.point;\n let total_lines = terminal.grid().total_lines();\n let metrics = self.glyph_cache.font_metrics();\n let size_info = self.size_info;\n\n let vi_mode = terminal.mode().contains(TermMode::VI);\n let vi_cursor_point = if vi_mode { Some(terminal.vi_mode_cursor.point) } else { None };\n\n // Add damage from the terminal.\n if self.collect_damage() {\n match terminal.damage() {\n TermDamage::Full => self.damage_tracker.frame().mark_fully_damaged(),\n TermDamage::Partial(damaged_lines) => {\n for damage in damaged_lines {\n self.damage_tracker.frame().damage_line(damage);\n }\n },\n }\n terminal.reset_damage();\n }\n\n // Drop terminal as early as possible to free lock.\n drop(terminal);\n\n // Add damage from alacritty's UI elements overlapping terminal.\n if self.collect_damage() {\n let requires_full_damage = self.visual_bell.intensity() != 0.\n || self.hint_state.active()\n || search_state.regex().is_some();\n\n if requires_full_damage {\n 
self.damage_tracker.frame().mark_fully_damaged();\n self.damage_tracker.next_frame().mark_fully_damaged();\n }\n\n let vi_cursor_viewport_point =\n vi_cursor_point.and_then(|cursor| point_to_viewport(display_offset, cursor));\n\n self.damage_tracker.damage_vi_cursor(vi_cursor_viewport_point);\n self.damage_tracker.damage_selection(selection_range, display_offset);\n }\n\n // Make sure this window's OpenGL context is active.\n self.make_current();\n\n self.renderer.clear(background_color, config.window_opacity());\n let mut lines = RenderLines::new();\n\n // Optimize loop hint comparator.\n let has_highlighted_hint =\n self.highlighted_hint.is_some() || self.vi_highlighted_hint.is_some();\n\n // Draw grid.\n {\n let _sampler = self.meter.sampler();\n\n // Ensure macOS hasn't reset our viewport.\n #[cfg(target_os = \"macos\")]\n self.renderer.set_viewport(&size_info);\n\n let glyph_cache = &mut self.glyph_cache;\n let highlighted_hint = &self.highlighted_hint;\n let vi_highlighted_hint = &self.vi_highlighted_hint;\n let damage_tracker = &mut self.damage_tracker;\n\n self.renderer.draw_cells(\n &size_info,\n glyph_cache,\n grid_cells.into_iter().map(|mut cell| {\n // Underline hints hovered by mouse or vi mode cursor.\n let point = term::viewport_to_point(display_offset, cell.point);\n\n if has_highlighted_hint {\n let hyperlink =\n cell.extra.as_ref().and_then(|extra| extra.hyperlink.as_ref());\n if highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n || vi_highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n {\n cell.flags.insert(Flags::UNDERLINE);\n // Damage hints for the current and next frames.\n damage_tracker.frame().damage_point(cell.point);\n damage_tracker.next_frame().damage_point(cell.point);\n }\n }\n\n // Update underline/strikeout.\n lines.update(&cell);\n\n cell\n }),\n );\n }\n\n let mut rects = lines.rects(&metrics, &size_info);\n\n if let Some(vi_cursor_point) = vi_cursor_point {\n // Indicate vi mode by showing the cursor's position in the top right corner.\n let line = (-vi_cursor_point.line.0 + size_info.bottommost_line().0) as usize;\n let obstructed_column = Some(vi_cursor_point)\n .filter(|point| point.line == -(display_offset as i32))\n .map(|point| point.column);\n self.draw_line_indicator(config, total_lines, obstructed_column, line);\n } else if search_state.regex().is_some() {\n // Show current display offset in vi-less search to indicate match position.\n self.draw_line_indicator(config, total_lines, None, display_offset);\n };\n\n // Draw cursor.\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n\n // Push visual bell after url/underline/strikeout rects.\n let visual_bell_intensity = self.visual_bell.intensity();\n if visual_bell_intensity != 0. 
{\n let visual_bell_rect = RenderRect::new(\n 0.,\n 0.,\n size_info.width(),\n size_info.height(),\n config.bell.color,\n visual_bell_intensity as f32,\n );\n rects.push(visual_bell_rect);\n }\n\n // Handle IME positioning and search bar rendering.\n let ime_position = match search_state.regex() {\n Some(regex) => {\n let search_label = match search_state.direction() {\n Direction::Right => FORWARD_SEARCH_LABEL,\n Direction::Left => BACKWARD_SEARCH_LABEL,\n };\n\n let search_text = Self::format_search(regex, search_label, size_info.columns());\n\n // Render the search bar.\n self.draw_search(config, &search_text);\n\n // Draw search bar cursor.\n let line = size_info.screen_lines();\n let column = Column(search_text.chars().count() - 1);\n\n // Add cursor to search bar if IME is not active.\n if self.ime.preedit().is_none() {\n let fg = config.colors.footer_bar_foreground();\n let shape = CursorShape::Underline;\n let cursor = RenderableCursor::new(Point::new(line, column), shape, fg, false);\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n }\n\n Some(Point::new(line, column))\n },\n None => {\n let num_lines = self.size_info.screen_lines();\n term::point_to_viewport(display_offset, cursor_point)\n .filter(|point| point.line < num_lines)\n },\n };\n\n // Handle IME.\n if self.ime.is_enabled() {\n if let Some(point) = ime_position {\n let (fg, bg) = if search_state.regex().is_some() {\n (config.colors.footer_bar_foreground(), config.colors.footer_bar_background())\n } else {\n (foreground_color, background_color)\n };\n\n self.draw_ime_preview(point, fg, bg, &mut rects, config);\n }\n }\n\n if let Some(message) = message_buffer.message() {\n let search_offset = usize::from(search_state.regex().is_some());\n let text = message.text(&size_info);\n\n // Create a new rectangle for the background.\n let start_line = size_info.screen_lines() + search_offset;\n let y = size_info.cell_height().mul_add(start_line as f32, size_info.padding_y());\n\n let bg = match message.ty() {\n MessageType::Error => config.colors.normal.red,\n MessageType::Warning => config.colors.normal.yellow,\n };\n\n let x = 0;\n let width = size_info.width() as i32;\n let height = (size_info.height() - y) as i32;\n let message_bar_rect =\n RenderRect::new(x as f32, y, width as f32, height as f32, bg, 1.);\n\n // Push message_bar in the end, so it'll be above all other content.\n rects.push(message_bar_rect);\n\n // Always damage message bar, since it could have messages of the same size in it.\n self.damage_tracker.frame().add_viewport_rect(&size_info, x, y as i32, width, height);\n\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n\n // Relay messages to the user.\n let glyph_cache = &mut self.glyph_cache;\n let fg = config.colors.primary.background;\n for (i, message_text) in text.iter().enumerate() {\n let point = Point::new(start_line + i, Column(0));\n self.renderer.draw_string(\n point,\n fg,\n bg,\n message_text.chars(),\n &size_info,\n glyph_cache,\n );\n }\n } else {\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n }\n\n self.draw_render_timer(config);\n\n // Draw hyperlink uri preview.\n if has_highlighted_hint {\n let cursor_point = vi_cursor_point.or(Some(cursor_point));\n self.draw_hyperlink_preview(config, cursor_point, display_offset);\n }\n\n // Notify winit that we're about to present.\n self.window.pre_present_notify();\n\n // Highlight damage for debugging.\n if self.damage_tracker.debug {\n let damage = 
self.damage_tracker.shape_frame_damage(self.size_info.into());\n let mut rects = Vec::with_capacity(damage.len());\n self.highlight_damage(&mut rects);\n self.renderer.draw_rects(&self.size_info, &metrics, rects);\n }\n\n // Clearing debug highlights from the previous frame requires full redraw.\n self.swap_buffers();\n\n if matches!(self.raw_window_handle, RawWindowHandle::Xcb(_) | RawWindowHandle::Xlib(_)) {\n // On X11 `swap_buffers` does not block for vsync. However the next OpenGl command\n // will block to synchronize (this is `glClear` in Alacritty), which causes a\n // permanent one frame delay.\n self.renderer.finish();\n }\n\n // XXX: Request the new frame after swapping buffers, so the\n // time to finish OpenGL operations is accounted for in the timeout.\n if !matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) {\n self.request_frame(scheduler);\n }\n\n self.damage_tracker.swap_damage();\n }\n\n /// Update to a new configuration.\n pub fn update_config(&mut self, config: &UiConfig) {\n self.damage_tracker.debug = config.debug.highlight_damage;\n self.visual_bell.update_config(&config.bell);\n self.colors = List::from(&config.colors);\n }\n\n /// Update the mouse/vi mode cursor hint highlighting.\n ///\n /// This will return whether the highlighted hints changed.\n pub fn update_highlighted_hints(\n &mut self,\n term: &Term,\n config: &UiConfig,\n mouse: &Mouse,\n modifiers: ModifiersState,\n ) -> bool {\n // Update vi mode cursor hint.\n let vi_highlighted_hint = if term.mode().contains(TermMode::VI) {\n let mods = ModifiersState::all();\n let point = term.vi_mode_cursor.point;\n hint::highlighted_at(term, config, point, mods)\n } else {\n None\n };\n let mut dirty = vi_highlighted_hint != self.vi_highlighted_hint;\n self.vi_highlighted_hint = vi_highlighted_hint;\n\n // Abort if mouse highlighting conditions are not met.\n if !mouse.inside_text_area || !term.selection.as_ref().map_or(true, Selection::is_empty) {\n dirty |= self.highlighted_hint.is_some();\n self.highlighted_hint = None;\n return dirty;\n }\n\n // Find highlighted hint at mouse position.\n let point = mouse.point(&self.size_info, term.grid().display_offset());\n let highlighted_hint = hint::highlighted_at(term, config, point, modifiers);\n\n // Update cursor shape.\n if highlighted_hint.is_some() {\n // If mouse changed the line, we should update the hyperlink preview, since the\n // highlighted hint could be disrupted by the old preview.\n dirty = self.hint_mouse_point.map_or(false, |p| p.line != point.line);\n self.hint_mouse_point = Some(point);\n self.window.set_mouse_cursor(CursorIcon::Pointer);\n } else if self.highlighted_hint.is_some() {\n self.hint_mouse_point = None;\n if term.mode().intersects(TermMode::MOUSE_MODE) && !term.mode().contains(TermMode::VI) {\n self.window.set_mouse_cursor(CursorIcon::Default);\n } else {\n self.window.set_mouse_cursor(CursorIcon::Text);\n }\n }\n\n dirty |= self.highlighted_hint != highlighted_hint;\n self.highlighted_hint = highlighted_hint;\n\n dirty\n }\n\n #[inline(never)]\n fn draw_ime_preview(\n &mut self,\n point: Point,\n fg: Rgb,\n bg: Rgb,\n rects: &mut Vec,\n config: &UiConfig,\n ) {\n let preedit = match self.ime.preedit() {\n Some(preedit) => preedit,\n None => {\n // In case we don't have preedit, just set the popup point.\n self.window.update_ime_position(point, &self.size_info);\n return;\n },\n };\n\n let num_cols = self.size_info.columns();\n\n // Get the visible preedit.\n let visible_text: String = match (preedit.cursor_byte_offset, 
preedit.cursor_end_offset) {\n (Some(byte_offset), Some(end_offset)) if end_offset > num_cols => StrShortener::new(\n &preedit.text[byte_offset..],\n num_cols,\n ShortenDirection::Right,\n Some(SHORTENER),\n ),\n _ => {\n StrShortener::new(&preedit.text, num_cols, ShortenDirection::Left, Some(SHORTENER))\n },\n }\n .collect();\n\n let visible_len = visible_text.chars().count();\n\n let end = cmp::min(point.column.0 + visible_len, num_cols);\n let start = end.saturating_sub(visible_len);\n\n let start = Point::new(point.line, Column(start));\n let end = Point::new(point.line, Column(end - 1));\n\n let glyph_cache = &mut self.glyph_cache;\n let metrics = glyph_cache.font_metrics();\n\n self.renderer.draw_string(\n start,\n fg,\n bg,\n visible_text.chars(),\n &self.size_info,\n glyph_cache,\n );\n\n // Damage preedit inside the terminal viewport.\n if self.collect_damage() && point.line < self.size_info.screen_lines() {\n let damage = LineDamageBounds::new(start.line, 0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n // Add underline for preedit text.\n let underline = RenderLine { start, end, color: fg };\n rects.extend(underline.rects(Flags::UNDERLINE, &metrics, &self.size_info));\n\n let ime_popup_point = match preedit.cursor_end_offset {\n Some(cursor_end_offset) if cursor_end_offset != 0 => {\n let is_wide = preedit.text[preedit.cursor_byte_offset.unwrap_or_default()..]\n .chars()\n .next()\n .map(|ch| ch.width() == Some(2))\n .unwrap_or_default();\n\n let cursor_column = Column(\n (end.column.0 as isize - cursor_end_offset as isize + 1).max(0) as usize,\n );\n let cursor_point = Point::new(point.line, cursor_column);\n let cursor =\n RenderableCursor::new(cursor_point, CursorShape::HollowBlock, fg, is_wide);\n rects.extend(cursor.rects(&self.size_info, config.cursor.thickness()));\n cursor_point\n },\n _ => end,\n };\n\n self.window.update_ime_position(ime_popup_point, &self.size_info);\n }\n\n /// Format search regex to account for the cursor and fullwidth characters.\n fn format_search(search_regex: &str, search_label: &str, max_width: usize) -> String {\n let label_len = search_label.len();\n\n // Skip `search_regex` formatting if only label is visible.\n if label_len > max_width {\n return search_label[..max_width].to_owned();\n }\n\n // The search string consists of `search_label` + `search_regex` + `cursor`.\n let mut bar_text = String::from(search_label);\n bar_text.extend(StrShortener::new(\n search_regex,\n max_width.wrapping_sub(label_len + 1),\n ShortenDirection::Left,\n Some(SHORTENER),\n ));\n\n // Add place for cursor.\n bar_text.push(' ');\n\n bar_text\n }\n\n /// Draw preview for the currently highlighted `Hyperlink`.\n #[inline(never)]\n fn draw_hyperlink_preview(\n &mut self,\n config: &UiConfig,\n cursor_point: Option,\n display_offset: usize,\n ) {\n let num_cols = self.size_info.columns();\n let uris: Vec<_> = self\n .highlighted_hint\n .iter()\n .chain(&self.vi_highlighted_hint)\n .filter_map(|hint| hint.hyperlink().map(|hyperlink| hyperlink.uri()))\n .map(|uri| StrShortener::new(uri, num_cols, ShortenDirection::Right, Some(SHORTENER)))\n .collect();\n\n if uris.is_empty() {\n return;\n }\n\n // The maximum amount of protected lines including the ones we'll show preview on.\n let max_protected_lines = uris.len() * 2;\n\n // Lines we shouldn't show preview on, because it'll obscure the highlighted hint.\n let mut protected_lines = Vec::with_capacity(max_protected_lines);\n if 
self.size_info.screen_lines() > max_protected_lines {\n // Prefer to show preview even when it'll likely obscure the highlighted hint, when\n // there's no place left for it.\n protected_lines.push(self.hint_mouse_point.map(|point| point.line));\n protected_lines.push(cursor_point.map(|point| point.line));\n }\n\n // Find the line in viewport we can draw preview on without obscuring protected lines.\n let viewport_bottom = self.size_info.bottommost_line() - Line(display_offset as i32);\n let viewport_top = viewport_bottom - (self.size_info.screen_lines() - 1);\n let uri_lines = (viewport_top.0..=viewport_bottom.0)\n .rev()\n .map(|line| Some(Line(line)))\n .filter_map(|line| {\n if protected_lines.contains(&line) {\n None\n } else {\n protected_lines.push(line);\n line\n }\n })\n .take(uris.len())\n .flat_map(|line| term::point_to_viewport(display_offset, Point::new(line, Column(0))));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n for (uri, point) in uris.into_iter().zip(uri_lines) {\n // Damage the uri preview.\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n\n // Damage the uri preview for the next frame as well.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n self.renderer.draw_string(point, fg, bg, uri, &self.size_info, &mut self.glyph_cache);\n }\n }\n\n /// Draw current search regex.\n #[inline(never)]\n fn draw_search(&mut self, config: &UiConfig, text: &str) {\n // Assure text length is at least num_cols.\n let num_cols = self.size_info.columns();\n let text = format!(\"{:<1$}\", text, num_cols);\n\n let point = Point::new(self.size_info.screen_lines(), Column(0));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n\n self.renderer.draw_string(\n point,\n fg,\n bg,\n text.chars(),\n &self.size_info,\n &mut self.glyph_cache,\n );\n }\n\n /// Draw render timer.\n #[inline(never)]\n fn draw_render_timer(&mut self, config: &UiConfig) {\n if !config.debug.render_timer {\n return;\n }\n\n let timing = format!(\"{:.3} usec\", self.meter.average());\n let point = Point::new(self.size_info.screen_lines().saturating_sub(2), Column(0));\n let fg = config.colors.primary.background;\n let bg = config.colors.normal.red;\n\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, timing.len());\n self.damage_tracker.frame().damage_line(damage);\n // Damage the render timer for the next frame.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, timing.chars(), &self.size_info, glyph_cache);\n }\n\n /// Draw an indicator for the position of a line in history.\n #[inline(never)]\n fn draw_line_indicator(\n &mut self,\n config: &UiConfig,\n total_lines: usize,\n obstructed_column: Option,\n line: usize,\n ) {\n let columns = self.size_info.columns();\n let text = format!(\"[{}/{}]\", line, total_lines - 1);\n let column = Column(self.size_info.columns().saturating_sub(text.len()));\n let point = Point::new(0, column);\n\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, columns - 1);\n self.damage_tracker.frame().damage_line(damage);\n // Damage it on the next frame in case it goes away.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let colors = &config.colors;\n let fg = 
colors.line_indicator.foreground.unwrap_or(colors.primary.background);\n let bg = colors.line_indicator.background.unwrap_or(colors.primary.foreground);\n\n // Do not render anything if it would obscure the vi mode cursor.\n if obstructed_column.map_or(true, |obstructed_column| obstructed_column < column) {\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, text.chars(), &self.size_info, glyph_cache);\n }\n }\n\n /// Returns `true` if damage information should be collected, `false` otherwise.\n #[inline]\n fn collect_damage(&self) -> bool {\n matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) || self.damage_tracker.debug\n }\n\n /// Highlight damaged rects.\n ///\n /// This function is for debug purposes only.\n fn highlight_damage(&self, render_rects: &mut Vec) {\n for damage_rect in &self.damage_tracker.shape_frame_damage(self.size_info.into()) {\n let x = damage_rect.x as f32;\n let height = damage_rect.height as f32;\n let width = damage_rect.width as f32;\n let y = damage_y_to_viewport_y(&self.size_info, damage_rect) as f32;\n let render_rect = RenderRect::new(x, y, width, height, DAMAGE_RECT_COLOR, 0.5);\n\n render_rects.push(render_rect);\n }\n }\n\n /// Request a new frame for a window on Wayland.\n fn request_frame(&mut self, scheduler: &mut Scheduler) {\n // Mark that we've used a frame.\n self.window.has_frame = false;\n\n // Get the display vblank interval.\n let monitor_vblank_interval = 1_000_000.\n / self\n .window\n .current_monitor()\n .and_then(|monitor| monitor.refresh_rate_millihertz())\n .unwrap_or(60_000) as f64;\n\n // Now convert it to micro seconds.\n let monitor_vblank_interval =\n Duration::from_micros((1000. * monitor_vblank_interval) as u64);\n\n let swap_timeout = self.frame_timer.compute_timeout(monitor_vblank_interval);\n\n let window_id = self.window.id();\n let timer_id = TimerId::new(Topic::Frame, window_id);\n let event = Event::new(EventType::Frame, window_id);\n\n scheduler.schedule(event, swap_timeout, false, timer_id);\n }\n}\n\nimpl Drop for Display {\n fn drop(&mut self) {\n // Switch OpenGL context before dropping, otherwise objects (like programs) from other\n // contexts might be deleted when dropping renderer.\n self.make_current();\n unsafe {\n ManuallyDrop::drop(&mut self.renderer);\n ManuallyDrop::drop(&mut self.context);\n ManuallyDrop::drop(&mut self.surface);\n }\n }\n}\n\n/// Input method state.\n#[derive(Debug, Default)]\npub struct Ime {\n /// Whether the IME is enabled.\n enabled: bool,\n\n /// Current IME preedit.\n preedit: Option,\n}\n\nimpl Ime {\n #[inline]\n pub fn set_enabled(&mut self, is_enabled: bool) {\n if is_enabled {\n self.enabled = is_enabled\n } else {\n // Clear state when disabling IME.\n *self = Default::default();\n }\n }\n\n #[inline]\n pub fn is_enabled(&self) -> bool {\n self.enabled\n }\n\n #[inline]\n pub fn set_preedit(&mut self, preedit: Option) {\n self.preedit = preedit;\n }\n\n #[inline]\n pub fn preedit(&self) -> Option<&Preedit> {\n self.preedit.as_ref()\n }\n}\n\n#[derive(Debug, Default, PartialEq, Eq)]\npub struct Preedit {\n /// The preedit text.\n text: String,\n\n /// Byte offset for cursor start into the preedit text.\n ///\n /// `None` means that the cursor is invisible.\n cursor_byte_offset: Option,\n\n /// The cursor offset from the end of the preedit in char width.\n cursor_end_offset: Option,\n}\n\nimpl Preedit {\n pub fn new(text: String, cursor_byte_offset: Option) -> Self {\n let cursor_end_offset = if let Some(byte_offset) = 
cursor_byte_offset {\n // Convert byte offset into char offset.\n let cursor_end_offset =\n text[byte_offset..].chars().fold(0, |acc, ch| acc + ch.width().unwrap_or(1));\n\n Some(cursor_end_offset)\n } else {\n None\n };\n\n Self { text, cursor_byte_offset, cursor_end_offset }\n }\n}\n\n/// Pending renderer updates.\n///\n/// All renderer updates are cached to be applied just before rendering, to avoid platform-specific\n/// rendering issues.\n#[derive(Debug, Default, Copy, Clone)]\npub struct RendererUpdate {\n /// Should resize the window.\n resize: bool,\n\n /// Clear font caches.\n clear_font_cache: bool,\n}\n\n/// Struct for safe in-place replacement.\n///\n/// This struct allows easily replacing struct fields that provide `self -> Self` methods in-place,\n/// without having to deal with constantly unwrapping the underlying [`Option`].\nstruct Replaceable(Option);\n\nimpl Replaceable {\n pub fn new(inner: T) -> Self {\n Self(Some(inner))\n }\n\n /// Replace the contents of the container.\n pub fn replace_with T>(&mut self, f: F) {\n self.0 = self.0.take().map(f);\n }\n\n /// Get immutable access to the wrapped value.\n pub fn get(&self) -> &T {\n self.0.as_ref().unwrap()\n }\n\n /// Get mutable access to the wrapped value.\n pub fn get_mut(&mut self) -> &mut T {\n self.0.as_mut().unwrap()\n }\n}\n\nimpl Deref for Replaceable {\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n self.get()\n }\n}\n\nimpl DerefMut for Replaceable {\n fn deref_mut(&mut self) -> &mut Self::Target {\n self.get_mut()\n }\n}\n\n/// The frame timer state.\npub struct FrameTimer {\n /// Base timestamp used to compute sync points.\n base: Instant,\n\n /// The last timestamp we synced to.\n last_synced_timestamp: Instant,\n\n /// The refresh rate we've used to compute sync timestamps.\n refresh_interval: Duration,\n}\n\nimpl FrameTimer {\n pub fn new() -> Self {\n let now = Instant::now();\n Self { base: now, last_synced_timestamp: now, refresh_interval: Duration::ZERO }\n }\n\n /// Compute the delay that we should use to achieve the target frame\n /// rate.\n pub fn compute_timeout(&mut self, refresh_interval: Duration) -> Duration {\n let now = Instant::now();\n\n // Handle refresh rate change.\n if self.refresh_interval != refresh_interval {\n self.base = now;\n self.last_synced_timestamp = now;\n self.refresh_interval = refresh_interval;\n return refresh_interval;\n }\n\n let next_frame = self.last_synced_timestamp + self.refresh_interval;\n\n if next_frame < now {\n // Redraw immediately if we haven't drawn in over `refresh_interval` microseconds.\n let elapsed_micros = (now - self.base).as_micros() as u64;\n let refresh_micros = self.refresh_interval.as_micros() as u64;\n self.last_synced_timestamp =\n now - Duration::from_micros(elapsed_micros % refresh_micros);\n Duration::ZERO\n } else {\n // Redraw on the next `refresh_interval` clock tick.\n self.last_synced_timestamp = next_frame;\n next_frame - now\n }\n }\n}\n\n/// Calculate the cell dimensions based on font metrics.\n///\n/// This will return a tuple of the cell width and height.\n#[inline]\nfn compute_cell_size(config: &UiConfig, metrics: &crossfont::Metrics) -> (f32, f32) {\n let offset_x = f64::from(config.font.offset.x);\n let offset_y = f64::from(config.font.offset.y);\n (\n (metrics.average_advance + offset_x).floor().max(1.) as f32,\n (metrics.line_height + offset_y).floor().max(1.) 
as f32,\n )\n}\n\n/// Calculate the size of the window given padding, terminal dimensions and cell size.\nfn window_size(\n config: &UiConfig,\n dimensions: Dimensions,\n cell_width: f32,\n cell_height: f32,\n scale_factor: f32,\n) -> PhysicalSize {\n let padding = config.window.padding(scale_factor);\n\n let grid_width = cell_width * dimensions.columns.max(MIN_COLUMNS) as f32;\n let grid_height = cell_height * dimensions.lines.max(MIN_SCREEN_LINES) as f32;\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n PhysicalSize::new(width as u32, height as u32)\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/display/mod.rs", "repo_name": "alacritty/alacritty", "num_chunks": 83, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 0, "content": "//! The display subsystem including window management, font rasterization, and\n//! GPU drawing.\n\nuse std::cmp;\nuse std::fmt::{self, Formatter};\nuse std::mem::{self, ManuallyDrop};\nuse std::num::NonZeroU32;\nuse std::ops::{Deref, DerefMut};\nuse std::time::{Duration, Instant};\n\nuse glutin::context::{NotCurrentContext, PossiblyCurrentContext};\nuse glutin::prelude::*;\nuse glutin::surface::{Surface, SwapInterval, WindowSurface};\n\nuse log::{debug, info};\nuse parking_lot::MutexGuard;\nuse raw_window_handle::RawWindowHandle;\nuse serde::{Deserialize, Serialize};\nuse winit::dpi::PhysicalSize;\nuse winit::keyboard::ModifiersState;\nuse winit::window::CursorIcon;\n\nuse crossfont::{Rasterize, Rasterizer, Size as FontSize};\nuse unicode_width::UnicodeWidthChar;\n\n", "meta": {"hash_id": "484264198542f621aca7f7e0469f3f5fccb23b1db7ac149238eed2ec5f595fd6"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 1, "content": "use alacritty_terminal::event::{EventListener, OnResize, WindowSize};\nuse alacritty_terminal::grid::Dimensions as TermDimensions;\nuse alacritty_terminal::index::{Column, Direction, Line, Point};\nuse alacritty_terminal::selection::Selection;\nuse alacritty_terminal::term::cell::Flags;\nuse alacritty_terminal::term::{\n self, point_to_viewport, LineDamageBounds, Term, TermDamage, TermMode, MIN_COLUMNS,\n MIN_SCREEN_LINES,\n};\nuse alacritty_terminal::vte::ansi::{CursorShape, NamedColor};\n\n", "meta": {"hash_id": "76d9a8e79ad1411705e13167d40249eed4f8d6364b22acd0869bfe12e7b56338"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 2, "content": "use crate::config::font::Font;\nuse crate::config::window::Dimensions;\n#[cfg(not(windows))]\nuse crate::config::window::StartupMode;\nuse crate::config::UiConfig;\nuse crate::display::bell::VisualBell;\nuse crate::display::color::{List, Rgb};\nuse crate::display::content::{RenderableContent, RenderableCursor};\nuse crate::display::cursor::IntoRects;\nuse crate::display::damage::{damage_y_to_viewport_y, DamageTracker};\nuse crate::display::hint::{HintMatch, HintState};\nuse crate::display::meter::Meter;\nuse crate::display::window::Window;\nuse crate::event::{Event, EventType, Mouse, SearchState};\nuse crate::message_bar::{MessageBuffer, MessageType};\nuse crate::renderer::rects::{RenderLine, RenderLines, RenderRect};\nuse crate::renderer::{self, GlyphCache, Renderer};\nuse crate::scheduler::{Scheduler, TimerId, Topic};\nuse crate::string::{ShortenDirection, StrShortener};\n\n", "meta": {"hash_id": 
"5658811313267444de24e0bc4280ed6897a88442d59a6d9a9805b10fae348225"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 3, "content": "pub mod color;\npub mod content;\npub mod cursor;\npub mod hint;\npub mod window;\n\nmod bell;\nmod damage;\nmod meter;\n\n/// Label for the forward terminal search bar.\nconst FORWARD_SEARCH_LABEL: &str = \"Search: \";\n\n/// Label for the backward terminal search bar.\nconst BACKWARD_SEARCH_LABEL: &str = \"Backward Search: \";\n\n/// The character used to shorten the visible text like uri preview or search regex.\nconst SHORTENER: char = '…';\n\n/// Color which is used to highlight damaged rects when debugging.\nconst DAMAGE_RECT_COLOR: Rgb = Rgb::new(255, 0, 255);\n\n#[derive(Debug)]\npub enum Error {\n /// Error with window management.\n Window(window::Error),\n\n", "meta": {"hash_id": "e81ad765dd04aa2f33ced2d8e1c335d66fa46e352bbad1d604f622daf6a07275"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 4, "content": " /// Error dealing with fonts.\n Font(crossfont::Error),\n\n /// Error in renderer.\n Render(renderer::Error),\n\n /// Error during context operations.\n Context(glutin::error::Error),\n}\n\nimpl std::error::Error for Error {\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n match self {\n Error::Window(err) => err.source(),\n Error::Font(err) => err.source(),\n Error::Render(err) => err.source(),\n Error::Context(err) => err.source(),\n }\n }\n}\n\n", "meta": {"hash_id": "f9071da0991c1542dcd7921265508b2cbca7f6da227559a28ac1b5703c96a6e9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 5, "content": "impl fmt::Display for Error {\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n match self {\n Error::Window(err) => err.fmt(f),\n Error::Font(err) => err.fmt(f),\n Error::Render(err) => err.fmt(f),\n Error::Context(err) => err.fmt(f),\n }\n }\n}\n\nimpl From for Error {\n fn from(val: window::Error) -> Self {\n Error::Window(val)\n }\n}\n\nimpl From for Error {\n fn from(val: crossfont::Error) -> Self {\n Error::Font(val)\n }\n}\n\nimpl From for Error {\n fn from(val: renderer::Error) -> Self {\n Error::Render(val)\n }\n}\n\nimpl From for Error {\n fn from(val: glutin::error::Error) -> Self {\n Error::Context(val)\n }\n}\n\n", "meta": {"hash_id": "c7e78722d16533c3203bf8a963bf81552ccb100fb4e41c7d835e169d92c4286d"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 6, "content": "/// Terminal size info.\n#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]\npub struct SizeInfo {\n /// Terminal window width.\n width: T,\n\n /// Terminal window height.\n height: T,\n\n /// Width of individual cell.\n cell_width: T,\n\n /// Height of individual cell.\n cell_height: T,\n\n /// Horizontal window padding.\n padding_x: T,\n\n /// Vertical window padding.\n padding_y: T,\n\n /// Number of lines in the viewport.\n screen_lines: usize,\n\n /// Number of columns in the viewport.\n columns: usize,\n}\n\n", "meta": {"hash_id": "42a9f7ba3845fe29a133013618f15cf26f8cbe47a4a9220416f8edf6af47996c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 7, "content": "impl From> for SizeInfo {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n width: size_info.width as u32,\n height: size_info.height as u32,\n cell_width: size_info.cell_width as u32,\n cell_height: size_info.cell_height as u32,\n padding_x: 
size_info.padding_x as u32,\n padding_y: size_info.padding_y as u32,\n screen_lines: size_info.screen_lines,\n columns: size_info.screen_lines,\n }\n }\n}\n\n", "meta": {"hash_id": "322cbccb60ecbba5bfc46ccdc2a6ecb6e78a7002c0426a282f6371e3946e72bd"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 8, "content": "impl From> for WindowSize {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n num_cols: size_info.columns() as u16,\n num_lines: size_info.screen_lines() as u16,\n cell_width: size_info.cell_width() as u16,\n cell_height: size_info.cell_height() as u16,\n }\n }\n}\n\nimpl SizeInfo {\n #[inline]\n pub fn width(&self) -> T {\n self.width\n }\n\n #[inline]\n pub fn height(&self) -> T {\n self.height\n }\n\n #[inline]\n pub fn cell_width(&self) -> T {\n self.cell_width\n }\n\n #[inline]\n pub fn cell_height(&self) -> T {\n self.cell_height\n }\n\n #[inline]\n pub fn padding_x(&self) -> T {\n self.padding_x\n }\n\n #[inline]\n pub fn padding_y(&self) -> T {\n self.padding_y\n }\n}\n\n", "meta": {"hash_id": "d3da5d4d6a42319a17785d3cb52f22fb2532996de360fb0a70d1a10dc581ef57"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 9, "content": "impl SizeInfo {\n #[allow(clippy::too_many_arguments)]\n pub fn new(\n width: f32,\n height: f32,\n cell_width: f32,\n cell_height: f32,\n mut padding_x: f32,\n mut padding_y: f32,\n dynamic_padding: bool,\n ) -> SizeInfo {\n if dynamic_padding {\n padding_x = Self::dynamic_padding(padding_x.floor(), width, cell_width);\n padding_y = Self::dynamic_padding(padding_y.floor(), height, cell_height);\n }\n\n", "meta": {"hash_id": "fa70b304bccabbf388746322eebdfc35277b964c83b6baedb11d87538395744a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 10, "content": " let lines = (height - 2. * padding_y) / cell_height;\n let screen_lines = cmp::max(lines as usize, MIN_SCREEN_LINES);\n\n let columns = (width - 2. * padding_x) / cell_width;\n let columns = cmp::max(columns as usize, MIN_COLUMNS);\n\n SizeInfo {\n width,\n height,\n cell_width,\n cell_height,\n padding_x: padding_x.floor(),\n padding_y: padding_y.floor(),\n screen_lines,\n columns,\n }\n }\n\n #[inline]\n pub fn reserve_lines(&mut self, count: usize) {\n self.screen_lines = cmp::max(self.screen_lines.saturating_sub(count), MIN_SCREEN_LINES);\n }\n\n", "meta": {"hash_id": "332446484a520ca0002f602db88ae1228f7b7aa8adbf2e83ab0fd1df748d64a3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 11, "content": " /// Check if coordinates are inside the terminal grid.\n ///\n /// The padding, message bar or search are not counted as part of the grid.\n #[inline]\n pub fn contains_point(&self, x: usize, y: usize) -> bool {\n x <= (self.padding_x + self.columns as f32 * self.cell_width) as usize\n && x > self.padding_x as usize\n && y <= (self.padding_y + self.screen_lines as f32 * self.cell_height) as usize\n && y > self.padding_y as usize\n }\n\n", "meta": {"hash_id": "ef5f151414c4d3a796a840d12e29c2593453898ae0a1bf9a3c4402e62494d7b3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 12, "content": " /// Calculate padding to spread it evenly around the terminal content.\n #[inline]\n fn dynamic_padding(padding: f32, dimension: f32, cell_dimension: f32) -> f32 {\n padding + ((dimension - 2. 
* padding) % cell_dimension) / 2.\n }\n}\n\nimpl TermDimensions for SizeInfo {\n #[inline]\n fn columns(&self) -> usize {\n self.columns\n }\n\n #[inline]\n fn screen_lines(&self) -> usize {\n self.screen_lines\n }\n\n #[inline]\n fn total_lines(&self) -> usize {\n self.screen_lines()\n }\n}\n\n", "meta": {"hash_id": "73e5fe0b56a019ff26ad2381ff4b03e23d7765f4c505ed3d25a49d935c055637"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 13, "content": "#[derive(Default, Clone, Debug, PartialEq, Eq)]\npub struct DisplayUpdate {\n pub dirty: bool,\n\n dimensions: Option>,\n cursor_dirty: bool,\n font: Option,\n}\n\nimpl DisplayUpdate {\n pub fn dimensions(&self) -> Option> {\n self.dimensions\n }\n\n pub fn font(&self) -> Option<&Font> {\n self.font.as_ref()\n }\n\n pub fn cursor_dirty(&self) -> bool {\n self.cursor_dirty\n }\n\n pub fn set_dimensions(&mut self, dimensions: PhysicalSize) {\n self.dimensions = Some(dimensions);\n self.dirty = true;\n }\n\n pub fn set_font(&mut self, font: Font) {\n self.font = Some(font);\n self.dirty = true;\n }\n\n pub fn set_cursor_dirty(&mut self) {\n self.cursor_dirty = true;\n self.dirty = true;\n }\n}\n\n", "meta": {"hash_id": "e4f08dc76384cfa77f446aae70d820d82d9adea80d638f8db7324a8f69d73b5d"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 14, "content": "/// The display wraps a window, font rasterizer, and GPU renderer.\npub struct Display {\n pub window: Window,\n\n pub size_info: SizeInfo,\n\n /// Hint highlighted by the mouse.\n pub highlighted_hint: Option,\n\n /// Hint highlighted by the vi mode cursor.\n pub vi_highlighted_hint: Option,\n\n pub raw_window_handle: RawWindowHandle,\n\n /// UI cursor visibility for blinking.\n pub cursor_hidden: bool,\n\n pub visual_bell: VisualBell,\n\n /// Mapped RGB values for each terminal color.\n pub colors: List,\n\n /// State of the keyboard hints.\n pub hint_state: HintState,\n\n /// Unprocessed display updates.\n pub pending_update: DisplayUpdate,\n\n /// The renderer update that takes place only once before the actual rendering.\n pub pending_renderer_update: Option,\n\n", "meta": {"hash_id": "3811d9736c47f355a8ad4b929c285e77eb099350e68420142636952a39f2223e"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 15, "content": " /// The ime on the given display.\n pub ime: Ime,\n\n /// The state of the timer for frame scheduling.\n pub frame_timer: FrameTimer,\n\n /// Damage tracker for the given display.\n pub damage_tracker: DamageTracker,\n\n /// Font size used by the window.\n pub font_size: FontSize,\n\n // Mouse point position when highlighting hints.\n hint_mouse_point: Option,\n\n renderer: ManuallyDrop,\n\n surface: ManuallyDrop>,\n\n context: ManuallyDrop>,\n\n glyph_cache: GlyphCache,\n meter: Meter,\n}\n\nimpl Display {\n pub fn new(\n window: Window,\n gl_context: NotCurrentContext,\n config: &UiConfig,\n _tabbed: bool,\n ) -> Result {\n let raw_window_handle = window.raw_window_handle();\n\n let scale_factor = window.scale_factor as f32;\n let rasterizer = Rasterizer::new()?;\n\n", "meta": {"hash_id": "b15f77642e7ceded8462e22b6ac3f81c2205a35d59841fdc3fe6ad8c4ab9f3ae"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 16, "content": " let font_size = config.font.size().scale(scale_factor);\n debug!(\"Loading \\\"{}\\\" font\", &config.font.normal().family);\n let font = config.font.clone().with_size(font_size);\n let mut 
glyph_cache = GlyphCache::new(rasterizer, &font)?;\n\n let metrics = glyph_cache.font_metrics();\n let (cell_width, cell_height) = compute_cell_size(config, &metrics);\n\n // Resize the window to account for the user configured size.\n if let Some(dimensions) = config.window.dimensions() {\n let size = window_size(config, dimensions, cell_width, cell_height, scale_factor);\n window.request_inner_size(size);\n }\n\n", "meta": {"hash_id": "72b2e89ab1dc3d3689c6c98fec13bb24aadee6692a3837c6580dbc15874b986a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 17, "content": " // Create the GL surface to draw into.\n let surface = renderer::platform::create_gl_surface(\n &gl_context,\n window.inner_size(),\n window.raw_window_handle(),\n )?;\n\n // Make the context current.\n let context = gl_context.make_current(&surface)?;\n\n // Create renderer.\n let mut renderer = Renderer::new(&context, config.debug.renderer)?;\n\n", "meta": {"hash_id": "e835ec5d07329f713115a3c6273e5eb65798fd8f337f30af00c2bd2cca72fde9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 18, "content": " // Load font common glyphs to accelerate rendering.\n debug!(\"Filling glyph cache with common glyphs\");\n renderer.with_loader(|mut api| {\n glyph_cache.reset_glyph_cache(&mut api);\n });\n\n let padding = config.window.padding(window.scale_factor as f32);\n let viewport_size = window.inner_size();\n\n // Create new size with at least one column and row.\n let size_info = SizeInfo::new(\n viewport_size.width as f32,\n viewport_size.height as f32,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding && config.window.dimensions().is_none(),\n );\n\n", "meta": {"hash_id": "91bb3ae5c6b4871b1e845d262a3a43b180f51fb8d6f21ecce16cf8e1c92501f3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 19, "content": " info!(\"Cell size: {} x {}\", cell_width, cell_height);\n info!(\"Padding: {} x {}\", size_info.padding_x(), size_info.padding_y());\n info!(\"Width: {}, Height: {}\", size_info.width(), size_info.height());\n\n // Update OpenGL projection.\n renderer.resize(&size_info);\n\n // Clear screen.\n let background_color = config.colors.primary.background;\n renderer.clear(background_color, config.window_opacity());\n\n // Disable shadows for transparent windows on macOS.\n #[cfg(target_os = \"macos\")]\n window.set_has_shadow(config.window_opacity() >= 1.0);\n\n let is_wayland = matches!(raw_window_handle, RawWindowHandle::Wayland(_));\n\n", "meta": {"hash_id": "7faf9dbeb3a6455f09d242144a94e725f681ff8c08ea0280f3de2b22f4d55f6c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 20, "content": " // On Wayland we can safely ignore this call, since the window isn't visible until you\n // actually draw something into it and commit those changes.\n if !is_wayland {\n surface.swap_buffers(&context).expect(\"failed to swap buffers.\");\n renderer.finish();\n }\n\n // Set resize increments for the newly created window.\n if config.window.resize_increments {\n window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n window.set_visible(true);\n\n", "meta": {"hash_id": "c5c51f4f8a09dcfa7cd5c7d48a5506f82a3044f317efeafd10d19b70c5ac9b7b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 21, "content": " #[allow(clippy::single_match)]\n #[cfg(not(windows))]\n if !_tabbed {\n match 
config.window.startup_mode {\n #[cfg(target_os = \"macos\")]\n StartupMode::SimpleFullscreen => window.set_simple_fullscreen(true),\n StartupMode::Maximized if !is_wayland => window.set_maximized(true),\n _ => (),\n }\n }\n\n let hint_state = HintState::new(config.hints.alphabet());\n\n let mut damage_tracker = DamageTracker::new(size_info.screen_lines(), size_info.columns());\n damage_tracker.debug = config.debug.highlight_damage;\n\n // Disable vsync.\n if let Err(err) = surface.set_swap_interval(&context, SwapInterval::DontWait) {\n info!(\"Failed to disable vsync: {}\", err);\n }\n\n", "meta": {"hash_id": "35aa2a8614b0447dfb154a87dfb78adb124b3317914b16156fed58fd3f314225"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 22, "content": " Ok(Self {\n context: ManuallyDrop::new(Replaceable::new(context)),\n visual_bell: VisualBell::from(&config.bell),\n renderer: ManuallyDrop::new(renderer),\n surface: ManuallyDrop::new(surface),\n colors: List::from(&config.colors),\n frame_timer: FrameTimer::new(),\n raw_window_handle,\n damage_tracker,\n glyph_cache,\n hint_state,\n size_info,\n font_size,\n window,\n pending_renderer_update: Default::default(),\n vi_highlighted_hint: Default::default(),\n highlighted_hint: Default::default(),\n hint_mouse_point: Default::default(),\n pending_update: Default::default(),\n cursor_hidden: Default::default(),\n meter: Default::default(),\n ime: Default::default(),\n })\n }\n\n", "meta": {"hash_id": "e768c80e459a073329b21eb0d348a82d396b8fffaf27b945fdeba4d425f7b5ca"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 23, "content": " #[inline]\n pub fn gl_context(&self) -> &PossiblyCurrentContext {\n self.context.get()\n }\n\n pub fn make_not_current(&mut self) {\n if self.context.get().is_current() {\n self.context.replace_with(|context| {\n context\n .make_not_current()\n .expect(\"failed to disable context\")\n .treat_as_possibly_current()\n });\n }\n }\n\n pub fn make_current(&self) {\n if !self.context.get().is_current() {\n self.context.make_current(&self.surface).expect(\"failed to make context current\")\n }\n }\n\n", "meta": {"hash_id": "aa553a1499d68d6e0956646608fa67bb13518dc62eb65df6a3070b35c474038f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 24, "content": " fn swap_buffers(&self) {\n #[allow(clippy::single_match)]\n let res = match (self.surface.deref(), &self.context.get()) {\n #[cfg(not(any(target_os = \"macos\", windows)))]\n (Surface::Egl(surface), PossiblyCurrentContext::Egl(context))\n if matches!(self.raw_window_handle, RawWindowHandle::Wayland(_))\n && !self.damage_tracker.debug =>\n {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n surface.swap_buffers_with_damage(context, &damage)\n },\n (surface, context) => surface.swap_buffers(context),\n };\n if let Err(err) = res {\n debug!(\"error calling swap_buffers: {}\", err);\n }\n }\n\n", "meta": {"hash_id": "b668652e20a43061c67b22b03f56c114a7dc72bd75a21da776c9356ff2154af0"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 25, "content": " /// Update font size and cell dimensions.\n ///\n /// This will return a tuple of the cell width and height.\n fn update_font_size(\n glyph_cache: &mut GlyphCache,\n config: &UiConfig,\n font: &Font,\n ) -> (f32, f32) {\n let _ = glyph_cache.update_font_size(font);\n\n // Compute new cell sizes.\n compute_cell_size(config, 
&glyph_cache.font_metrics())\n }\n\n /// Reset glyph cache.\n fn reset_glyph_cache(&mut self) {\n let cache = &mut self.glyph_cache;\n self.renderer.with_loader(|mut api| {\n cache.reset_glyph_cache(&mut api);\n });\n }\n\n", "meta": {"hash_id": "fe9e2774ab5dfd564b6579cfb9b1ce85b2015177d221aef32e2d3ae14507cec4"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 26, "content": " // XXX: this function must not call to any `OpenGL` related tasks. Renderer updates are\n // performed in [`Self::process_renderer_update`] right before drawing.\n //\n /// Process update events.\n pub fn handle_update(\n &mut self,\n terminal: &mut Term,\n pty_resize_handle: &mut dyn OnResize,\n message_buffer: &MessageBuffer,\n search_state: &mut SearchState,\n config: &UiConfig,\n ) where\n T: EventListener,\n {\n let pending_update = mem::take(&mut self.pending_update);\n\n", "meta": {"hash_id": "ed1f56505a3374dd2edecc05ad0cee8c469187f9ecc26860492082baefccf9b5"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 27, "content": " let (mut cell_width, mut cell_height) =\n (self.size_info.cell_width(), self.size_info.cell_height());\n\n if pending_update.font().is_some() || pending_update.cursor_dirty() {\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.clear_font_cache = true\n }\n\n // Update font size and cell dimensions.\n if let Some(font) = pending_update.font() {\n let cell_dimensions = Self::update_font_size(&mut self.glyph_cache, config, font);\n cell_width = cell_dimensions.0;\n cell_height = cell_dimensions.1;\n\n", "meta": {"hash_id": "79e8568f712841aa209157ef132823f1306f62017a353e68b6113b714b554136"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 28, "content": " info!(\"Cell size: {} x {}\", cell_width, cell_height);\n\n // Mark entire terminal as damaged since glyph size could change without cell size\n // changes.\n self.damage_tracker.frame().mark_fully_damaged();\n }\n\n let (mut width, mut height) = (self.size_info.width(), self.size_info.height());\n if let Some(dimensions) = pending_update.dimensions() {\n width = dimensions.width as f32;\n height = dimensions.height as f32;\n }\n\n", "meta": {"hash_id": "20a281598c4b5f24da35d80abaf1a660d7a05a7d30f7566041448bf4c6a49ad7"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 29, "content": " let padding = config.window.padding(self.window.scale_factor as f32);\n\n let mut new_size = SizeInfo::new(\n width,\n height,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding,\n );\n\n // Update number of column/lines in the viewport.\n let search_active = search_state.history_index.is_some();\n let message_bar_lines = message_buffer.message().map_or(0, |m| m.text(&new_size).len());\n let search_lines = usize::from(search_active);\n new_size.reserve_lines(message_bar_lines + search_lines);\n\n", "meta": {"hash_id": "72069e14b9121905e307747adf2b90579c0aae90998058ad1320e1f17c892f17"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 30, "content": " // Update resize increments.\n if config.window.resize_increments {\n self.window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n // Resize when terminal when its dimensions have changed.\n if self.size_info.screen_lines() != new_size.screen_lines\n || self.size_info.columns() != 
new_size.columns()\n {\n // Resize PTY.\n pty_resize_handle.on_resize(new_size.into());\n\n", "meta": {"hash_id": "591f61d27ba4860ebe42874418318ca473425d226193d4c3803e71da91332e9a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 31, "content": " // Resize terminal.\n terminal.resize(new_size);\n\n // Resize damage tracking.\n self.damage_tracker.resize(new_size.screen_lines(), new_size.columns());\n }\n\n // Check if dimensions have changed.\n if new_size != self.size_info {\n // Queue renderer update.\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.resize = true;\n\n // Clear focused search match.\n search_state.clear_focused_match();\n }\n self.size_info = new_size;\n }\n\n", "meta": {"hash_id": "cbd894d544aaeadcbfea18922999949498c11df57b4722c93f1d487f08ede145"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 32, "content": " // NOTE: Renderer updates are split off, since platforms like Wayland require resize and other\n // OpenGL operations to be performed right before rendering. Otherwise they could lock the\n // back buffer and render with the previous state. This also solves flickering during resizes.\n //\n /// Update the state of the renderer.\n pub fn process_renderer_update(&mut self) {\n let renderer_update = match self.pending_renderer_update.take() {\n Some(renderer_update) => renderer_update,\n _ => return,\n };\n\n", "meta": {"hash_id": "bbf795ca1bbb43287648230c60608c2d4c4a558dfac639cf85cee2a03f842120"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 33, "content": " // Resize renderer.\n if renderer_update.resize {\n let width = NonZeroU32::new(self.size_info.width() as u32).unwrap();\n let height = NonZeroU32::new(self.size_info.height() as u32).unwrap();\n self.surface.resize(&self.context, width, height);\n }\n\n // Ensure we're modifying the correct OpenGL context.\n self.make_current();\n\n if renderer_update.clear_font_cache {\n self.reset_glyph_cache();\n }\n\n self.renderer.resize(&self.size_info);\n\n info!(\"Padding: {} x {}\", self.size_info.padding_x(), self.size_info.padding_y());\n info!(\"Width: {}, Height: {}\", self.size_info.width(), self.size_info.height());\n }\n\n", "meta": {"hash_id": "bae2fda06b24164a992b035f05346cec0db2aae06c91013b8901d87838639100"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 34, "content": " /// Draw the screen.\n ///\n /// A reference to Term whose state is being drawn must be provided.\n ///\n /// This call may block if vsync is enabled.\n pub fn draw(\n &mut self,\n mut terminal: MutexGuard<'_, Term>,\n scheduler: &mut Scheduler,\n message_buffer: &MessageBuffer,\n config: &UiConfig,\n search_state: &mut SearchState,\n ) {\n // Collect renderable content before the terminal is dropped.\n let mut content = RenderableContent::new(config, self, &terminal, search_state);\n", "meta": {"hash_id": "4ee907a039f825726c2412e854fe4bc9a179f4904d347aed1d037b9b9432af2f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 35, "content": " let mut grid_cells = Vec::new();\n for cell in &mut content {\n grid_cells.push(cell);\n }\n let selection_range = content.selection_range();\n let foreground_color = content.color(NamedColor::Foreground as usize);\n let background_color = content.color(NamedColor::Background as usize);\n let display_offset = 
content.display_offset();\n let cursor = content.cursor();\n\n let cursor_point = terminal.grid().cursor.point;\n let total_lines = terminal.grid().total_lines();\n let metrics = self.glyph_cache.font_metrics();\n let size_info = self.size_info;\n\n let vi_mode = terminal.mode().contains(TermMode::VI);\n let vi_cursor_point = if vi_mode { Some(terminal.vi_mode_cursor.point) } else { None };\n\n", "meta": {"hash_id": "fe491ce79ac648bfff38ee67779efea880d2cd4a4839d5ec21b41f0815d5442e"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 36, "content": " // Add damage from the terminal.\n if self.collect_damage() {\n match terminal.damage() {\n TermDamage::Full => self.damage_tracker.frame().mark_fully_damaged(),\n TermDamage::Partial(damaged_lines) => {\n for damage in damaged_lines {\n self.damage_tracker.frame().damage_line(damage);\n }\n },\n }\n terminal.reset_damage();\n }\n\n // Drop terminal as early as possible to free lock.\n drop(terminal);\n\n // Add damage from alacritty's UI elements overlapping terminal.\n if self.collect_damage() {\n let requires_full_damage = self.visual_bell.intensity() != 0.\n || self.hint_state.active()\n || search_state.regex().is_some();\n\n", "meta": {"hash_id": "78edad84eefb6200419f24f43c2eafca931dce7eec6ca54851fe18f91979f03b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 37, "content": " if requires_full_damage {\n self.damage_tracker.frame().mark_fully_damaged();\n self.damage_tracker.next_frame().mark_fully_damaged();\n }\n\n let vi_cursor_viewport_point =\n vi_cursor_point.and_then(|cursor| point_to_viewport(display_offset, cursor));\n\n self.damage_tracker.damage_vi_cursor(vi_cursor_viewport_point);\n self.damage_tracker.damage_selection(selection_range, display_offset);\n }\n\n // Make sure this window's OpenGL context is active.\n self.make_current();\n\n self.renderer.clear(background_color, config.window_opacity());\n let mut lines = RenderLines::new();\n\n // Optimize loop hint comparator.\n let has_highlighted_hint =\n self.highlighted_hint.is_some() || self.vi_highlighted_hint.is_some();\n\n", "meta": {"hash_id": "fbf5a992b119745ae32862906a730ddbedac02888a88bc5edd93d7cfaa3d4982"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 38, "content": " // Draw grid.\n {\n let _sampler = self.meter.sampler();\n\n // Ensure macOS hasn't reset our viewport.\n #[cfg(target_os = \"macos\")]\n self.renderer.set_viewport(&size_info);\n\n let glyph_cache = &mut self.glyph_cache;\n let highlighted_hint = &self.highlighted_hint;\n let vi_highlighted_hint = &self.vi_highlighted_hint;\n let damage_tracker = &mut self.damage_tracker;\n\n self.renderer.draw_cells(\n &size_info,\n glyph_cache,\n grid_cells.into_iter().map(|mut cell| {\n // Underline hints hovered by mouse or vi mode cursor.\n let point = term::viewport_to_point(display_offset, cell.point);\n\n", "meta": {"hash_id": "8c40ac29292bd9425a2fcef0f8c826637888ee41be53e36d9a257d3c3a7c15a3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 39, "content": " if has_highlighted_hint {\n let hyperlink =\n cell.extra.as_ref().and_then(|extra| extra.hyperlink.as_ref());\n if highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n || vi_highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n {\n cell.flags.insert(Flags::UNDERLINE);\n // Damage hints for the current 
and next frames.\n damage_tracker.frame().damage_point(cell.point);\n damage_tracker.next_frame().damage_point(cell.point);\n }\n }\n\n", "meta": {"hash_id": "3520acced525739e717b6e0e00879a0189cd42190601b2223baa6ad5555801e4"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 40, "content": " // Update underline/strikeout.\n lines.update(&cell);\n\n cell\n }),\n );\n }\n\n let mut rects = lines.rects(&metrics, &size_info);\n\n if let Some(vi_cursor_point) = vi_cursor_point {\n // Indicate vi mode by showing the cursor's position in the top right corner.\n let line = (-vi_cursor_point.line.0 + size_info.bottommost_line().0) as usize;\n let obstructed_column = Some(vi_cursor_point)\n .filter(|point| point.line == -(display_offset as i32))\n .map(|point| point.column);\n self.draw_line_indicator(config, total_lines, obstructed_column, line);\n } else if search_state.regex().is_some() {\n // Show current display offset in vi-less search to indicate match position.\n self.draw_line_indicator(config, total_lines, None, display_offset);\n };\n\n", "meta": {"hash_id": "298265debd2bb5d01f2becadcd2b913ad31b2ad76776a82a8cdc3f36307c54dd"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 41, "content": " // Draw cursor.\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n\n // Push visual bell after url/underline/strikeout rects.\n let visual_bell_intensity = self.visual_bell.intensity();\n if visual_bell_intensity != 0. {\n let visual_bell_rect = RenderRect::new(\n 0.,\n 0.,\n size_info.width(),\n size_info.height(),\n config.bell.color,\n visual_bell_intensity as f32,\n );\n rects.push(visual_bell_rect);\n }\n\n", "meta": {"hash_id": "18599d3454e142675535df415c5701663d07bffa7f8411f237c35a3d22c94e8f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 42, "content": " // Handle IME positioning and search bar rendering.\n let ime_position = match search_state.regex() {\n Some(regex) => {\n let search_label = match search_state.direction() {\n Direction::Right => FORWARD_SEARCH_LABEL,\n Direction::Left => BACKWARD_SEARCH_LABEL,\n };\n\n let search_text = Self::format_search(regex, search_label, size_info.columns());\n\n // Render the search bar.\n self.draw_search(config, &search_text);\n\n // Draw search bar cursor.\n let line = size_info.screen_lines();\n let column = Column(search_text.chars().count() - 1);\n\n", "meta": {"hash_id": "c20bfcb216ac981870022b5ffddbfa9b53bc1cf353566e9a690e17e3f4d6f544"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 43, "content": " // Add cursor to search bar if IME is not active.\n if self.ime.preedit().is_none() {\n let fg = config.colors.footer_bar_foreground();\n let shape = CursorShape::Underline;\n let cursor = RenderableCursor::new(Point::new(line, column), shape, fg, false);\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n }\n\n Some(Point::new(line, column))\n },\n None => {\n let num_lines = self.size_info.screen_lines();\n term::point_to_viewport(display_offset, cursor_point)\n .filter(|point| point.line < num_lines)\n },\n };\n\n", "meta": {"hash_id": "350ea9d2cd9bdced937484a43d5eac3fbedf6ed7063a0928056225ec2ca7058e"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 44, "content": " // Handle IME.\n if self.ime.is_enabled() {\n if let Some(point) = ime_position {\n let (fg, bg) = if 
search_state.regex().is_some() {\n (config.colors.footer_bar_foreground(), config.colors.footer_bar_background())\n } else {\n (foreground_color, background_color)\n };\n\n self.draw_ime_preview(point, fg, bg, &mut rects, config);\n }\n }\n\n if let Some(message) = message_buffer.message() {\n let search_offset = usize::from(search_state.regex().is_some());\n let text = message.text(&size_info);\n\n", "meta": {"hash_id": "5b742665b72ab3ff0a08013a146530fe6e6830cef61bce1a7f79aebf1eb67555"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 45, "content": " // Create a new rectangle for the background.\n let start_line = size_info.screen_lines() + search_offset;\n let y = size_info.cell_height().mul_add(start_line as f32, size_info.padding_y());\n\n let bg = match message.ty() {\n MessageType::Error => config.colors.normal.red,\n MessageType::Warning => config.colors.normal.yellow,\n };\n\n", "meta": {"hash_id": "a0a75ee6d8209be0e7e86de929d23cb6b231aa453c4bceb95b062a5bdbd0d51b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 46, "content": " let x = 0;\n let width = size_info.width() as i32;\n let height = (size_info.height() - y) as i32;\n let message_bar_rect =\n RenderRect::new(x as f32, y, width as f32, height as f32, bg, 1.);\n\n // Push message_bar in the end, so it'll be above all other content.\n rects.push(message_bar_rect);\n\n // Always damage message bar, since it could have messages of the same size in it.\n self.damage_tracker.frame().add_viewport_rect(&size_info, x, y as i32, width, height);\n\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n\n", "meta": {"hash_id": "7ff46d8ef5953994171ae2588af562115c117c5a10cdaccffbff5610b61c30d9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 47, "content": " // Relay messages to the user.\n let glyph_cache = &mut self.glyph_cache;\n let fg = config.colors.primary.background;\n for (i, message_text) in text.iter().enumerate() {\n let point = Point::new(start_line + i, Column(0));\n self.renderer.draw_string(\n point,\n fg,\n bg,\n message_text.chars(),\n &size_info,\n glyph_cache,\n );\n }\n } else {\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n }\n\n", "meta": {"hash_id": "4301e36e0494c3864c04e5e18d5714c4a38bacf6c448ef0f4b761c5a76781744"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 48, "content": " self.draw_render_timer(config);\n\n // Draw hyperlink uri preview.\n if has_highlighted_hint {\n let cursor_point = vi_cursor_point.or(Some(cursor_point));\n self.draw_hyperlink_preview(config, cursor_point, display_offset);\n }\n\n // Notify winit that we're about to present.\n self.window.pre_present_notify();\n\n // Highlight damage for debugging.\n if self.damage_tracker.debug {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n let mut rects = Vec::with_capacity(damage.len());\n self.highlight_damage(&mut rects);\n self.renderer.draw_rects(&self.size_info, &metrics, rects);\n }\n\n // Clearing debug highlights from the previous frame requires full redraw.\n self.swap_buffers();\n\n", "meta": {"hash_id": "634ea3f07419e628e451619fac284b5833898edc6e6206e6477f6b9397a2e163"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 49, "content": " if matches!(self.raw_window_handle, RawWindowHandle::Xcb(_) | RawWindowHandle::Xlib(_)) {\n // On 
X11 `swap_buffers` does not block for vsync. However the next OpenGl command\n // will block to synchronize (this is `glClear` in Alacritty), which causes a\n // permanent one frame delay.\n self.renderer.finish();\n }\n\n // XXX: Request the new frame after swapping buffers, so the\n // time to finish OpenGL operations is accounted for in the timeout.\n if !matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) {\n self.request_frame(scheduler);\n }\n\n self.damage_tracker.swap_damage();\n }\n\n /// Update to a new configuration.\n pub fn update_config(&mut self, config: &UiConfig) {\n self.damage_tracker.debug = config.debug.highlight_damage;\n self.visual_bell.update_config(&config.bell);\n self.colors = List::from(&config.colors);\n }\n\n", "meta": {"hash_id": "8193ff2c96e4891d02efb66bc8a53c84ac112a628fd77195c003536960b3267c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 50, "content": " /// Update the mouse/vi mode cursor hint highlighting.\n ///\n /// This will return whether the highlighted hints changed.\n pub fn update_highlighted_hints(\n &mut self,\n term: &Term,\n config: &UiConfig,\n mouse: &Mouse,\n modifiers: ModifiersState,\n ) -> bool {\n // Update vi mode cursor hint.\n let vi_highlighted_hint = if term.mode().contains(TermMode::VI) {\n let mods = ModifiersState::all();\n let point = term.vi_mode_cursor.point;\n hint::highlighted_at(term, config, point, mods)\n } else {\n None\n };\n let mut dirty = vi_highlighted_hint != self.vi_highlighted_hint;\n self.vi_highlighted_hint = vi_highlighted_hint;\n\n", "meta": {"hash_id": "ab85133046538b326b5da625910dbea49fdcfb5196b1302be9d7b38edfd2ecb1"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 51, "content": " // Abort if mouse highlighting conditions are not met.\n if !mouse.inside_text_area || !term.selection.as_ref().map_or(true, Selection::is_empty) {\n dirty |= self.highlighted_hint.is_some();\n self.highlighted_hint = None;\n return dirty;\n }\n\n // Find highlighted hint at mouse position.\n let point = mouse.point(&self.size_info, term.grid().display_offset());\n let highlighted_hint = hint::highlighted_at(term, config, point, modifiers);\n\n", "meta": {"hash_id": "c4e406929070cc79996d7fb380bd3196d1e17ec94dabeb5bf0058a1b83c8c4ca"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 52, "content": " // Update cursor shape.\n if highlighted_hint.is_some() {\n // If mouse changed the line, we should update the hyperlink preview, since the\n // highlighted hint could be disrupted by the old preview.\n dirty = self.hint_mouse_point.map_or(false, |p| p.line != point.line);\n self.hint_mouse_point = Some(point);\n self.window.set_mouse_cursor(CursorIcon::Pointer);\n } else if self.highlighted_hint.is_some() {\n self.hint_mouse_point = None;\n if term.mode().intersects(TermMode::MOUSE_MODE) && !term.mode().contains(TermMode::VI) {\n self.window.set_mouse_cursor(CursorIcon::Default);\n } else {\n self.window.set_mouse_cursor(CursorIcon::Text);\n }\n }\n\n", "meta": {"hash_id": "7a604714c947c7e581d69f25990ca030860f8076664f95c15614176b02f14f31"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 53, "content": " dirty |= self.highlighted_hint != highlighted_hint;\n self.highlighted_hint = highlighted_hint;\n\n dirty\n }\n\n #[inline(never)]\n fn draw_ime_preview(\n &mut self,\n point: Point,\n fg: Rgb,\n bg: Rgb,\n rects: &mut Vec,\n config: &UiConfig,\n ) 
{\n let preedit = match self.ime.preedit() {\n Some(preedit) => preedit,\n None => {\n // In case we don't have preedit, just set the popup point.\n self.window.update_ime_position(point, &self.size_info);\n return;\n },\n };\n\n let num_cols = self.size_info.columns();\n\n", "meta": {"hash_id": "36438e7aceac12b36ca1853743dbd414cc5a4eb8e6aa275d9cff045b7e2d4c88"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 54, "content": " // Get the visible preedit.\n let visible_text: String = match (preedit.cursor_byte_offset, preedit.cursor_end_offset) {\n (Some(byte_offset), Some(end_offset)) if end_offset > num_cols => StrShortener::new(\n &preedit.text[byte_offset..],\n num_cols,\n ShortenDirection::Right,\n Some(SHORTENER),\n ),\n _ => {\n StrShortener::new(&preedit.text, num_cols, ShortenDirection::Left, Some(SHORTENER))\n },\n }\n .collect();\n\n", "meta": {"hash_id": "02e3f7e39d36be809a0819dc9c541961971649fb2aa331c7c50fa4ec5e119978"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 55, "content": " let visible_len = visible_text.chars().count();\n\n let end = cmp::min(point.column.0 + visible_len, num_cols);\n let start = end.saturating_sub(visible_len);\n\n let start = Point::new(point.line, Column(start));\n let end = Point::new(point.line, Column(end - 1));\n\n let glyph_cache = &mut self.glyph_cache;\n let metrics = glyph_cache.font_metrics();\n\n self.renderer.draw_string(\n start,\n fg,\n bg,\n visible_text.chars(),\n &self.size_info,\n glyph_cache,\n );\n\n // Damage preedit inside the terminal viewport.\n if self.collect_damage() && point.line < self.size_info.screen_lines() {\n let damage = LineDamageBounds::new(start.line, 0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n", "meta": {"hash_id": "f4465b191a88d6c08068b0e33f39d884f63a67cc6829e896f24ecc1b96158940"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 56, "content": " // Add underline for preedit text.\n let underline = RenderLine { start, end, color: fg };\n rects.extend(underline.rects(Flags::UNDERLINE, &metrics, &self.size_info));\n\n let ime_popup_point = match preedit.cursor_end_offset {\n Some(cursor_end_offset) if cursor_end_offset != 0 => {\n let is_wide = preedit.text[preedit.cursor_byte_offset.unwrap_or_default()..]\n .chars()\n .next()\n .map(|ch| ch.width() == Some(2))\n .unwrap_or_default();\n\n", "meta": {"hash_id": "6a6a63878025944ce98f665e2365a98949fe34df15776aa07ce28cc0056d96e3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 57, "content": " let cursor_column = Column(\n (end.column.0 as isize - cursor_end_offset as isize + 1).max(0) as usize,\n );\n let cursor_point = Point::new(point.line, cursor_column);\n let cursor =\n RenderableCursor::new(cursor_point, CursorShape::HollowBlock, fg, is_wide);\n rects.extend(cursor.rects(&self.size_info, config.cursor.thickness()));\n cursor_point\n },\n _ => end,\n };\n\n", "meta": {"hash_id": "0999c2483d874c5f24ec5bbd6d71bd76a775140cbd0ab32eebb7e16a14576bbb"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 58, "content": " self.window.update_ime_position(ime_popup_point, &self.size_info);\n }\n\n /// Format search regex to account for the cursor and fullwidth characters.\n fn format_search(search_regex: &str, search_label: &str, max_width: usize) -> String {\n let label_len = 
search_label.len();\n\n // Skip `search_regex` formatting if only label is visible.\n if label_len > max_width {\n return search_label[..max_width].to_owned();\n }\n\n", "meta": {"hash_id": "3b5e31b08e83473461ab3d08a6586fcc45fe7f018892e0be697974a32cf99556"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 59, "content": " // The search string consists of `search_label` + `search_regex` + `cursor`.\n let mut bar_text = String::from(search_label);\n bar_text.extend(StrShortener::new(\n search_regex,\n max_width.wrapping_sub(label_len + 1),\n ShortenDirection::Left,\n Some(SHORTENER),\n ));\n\n // Add place for cursor.\n bar_text.push(' ');\n\n bar_text\n }\n\n", "meta": {"hash_id": "778d02069dba2c206695f9644f6da84d4059373e565c3a0a3582cb79e27fff53"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 60, "content": " /// Draw preview for the currently highlighted `Hyperlink`.\n #[inline(never)]\n fn draw_hyperlink_preview(\n &mut self,\n config: &UiConfig,\n cursor_point: Option,\n display_offset: usize,\n ) {\n let num_cols = self.size_info.columns();\n let uris: Vec<_> = self\n .highlighted_hint\n .iter()\n .chain(&self.vi_highlighted_hint)\n .filter_map(|hint| hint.hyperlink().map(|hyperlink| hyperlink.uri()))\n .map(|uri| StrShortener::new(uri, num_cols, ShortenDirection::Right, Some(SHORTENER)))\n .collect();\n\n", "meta": {"hash_id": "0b58c59db2b1bd7885858bc980e47aae6f8e8bb2087ed85e4fde922bfca11125"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 61, "content": " if uris.is_empty() {\n return;\n }\n\n // The maximum amount of protected lines including the ones we'll show preview on.\n let max_protected_lines = uris.len() * 2;\n\n // Lines we shouldn't show preview on, because it'll obscure the highlighted hint.\n let mut protected_lines = Vec::with_capacity(max_protected_lines);\n if self.size_info.screen_lines() > max_protected_lines {\n // Prefer to show preview even when it'll likely obscure the highlighted hint, when\n // there's no place left for it.\n protected_lines.push(self.hint_mouse_point.map(|point| point.line));\n protected_lines.push(cursor_point.map(|point| point.line));\n }\n\n", "meta": {"hash_id": "783ec3774b49fcca910661e627316fe5e9efbeffe175a624958653998950f878"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 62, "content": " // Find the line in viewport we can draw preview on without obscuring protected lines.\n let viewport_bottom = self.size_info.bottommost_line() - Line(display_offset as i32);\n let viewport_top = viewport_bottom - (self.size_info.screen_lines() - 1);\n let uri_lines = (viewport_top.0..=viewport_bottom.0)\n .rev()\n .map(|line| Some(Line(line)))\n .filter_map(|line| {\n if protected_lines.contains(&line) {\n None\n } else {\n protected_lines.push(line);\n line\n }\n })\n .take(uris.len())\n .flat_map(|line| term::point_to_viewport(display_offset, Point::new(line, Column(0))));\n\n", "meta": {"hash_id": "f16942c0759faa75a9115d4645762a3ad51c204eae0f9765e0f0c1e2d6b66385"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 63, "content": " let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n for (uri, point) in uris.into_iter().zip(uri_lines) {\n // Damage the uri preview.\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, num_cols);\n 
self.damage_tracker.frame().damage_line(damage);\n\n // Damage the uri preview for the next frame as well.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n self.renderer.draw_string(point, fg, bg, uri, &self.size_info, &mut self.glyph_cache);\n }\n }\n\n", "meta": {"hash_id": "e310b116e108b9ec16c1b9f09d5e70f193b7b368a4b94c97682448e4fbb308c6"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 64, "content": " /// Draw current search regex.\n #[inline(never)]\n fn draw_search(&mut self, config: &UiConfig, text: &str) {\n // Assure text length is at least num_cols.\n let num_cols = self.size_info.columns();\n let text = format!(\"{:<1$}\", text, num_cols);\n\n let point = Point::new(self.size_info.screen_lines(), Column(0));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n\n", "meta": {"hash_id": "94c9b5e79fb5bd80da00b4ecbe94fd9020f9e4110ea6656f13003da0f41cda9a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 65, "content": " self.renderer.draw_string(\n point,\n fg,\n bg,\n text.chars(),\n &self.size_info,\n &mut self.glyph_cache,\n );\n }\n\n /// Draw render timer.\n #[inline(never)]\n fn draw_render_timer(&mut self, config: &UiConfig) {\n if !config.debug.render_timer {\n return;\n }\n\n let timing = format!(\"{:.3} usec\", self.meter.average());\n let point = Point::new(self.size_info.screen_lines().saturating_sub(2), Column(0));\n let fg = config.colors.primary.background;\n let bg = config.colors.normal.red;\n\n", "meta": {"hash_id": "704ee1cc058f89cf5781311565df241f3affc1a6a6b3665420dbf77bb5cad1af"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 66, "content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, timing.len());\n self.damage_tracker.frame().damage_line(damage);\n // Damage the render timer for the next frame.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, timing.chars(), &self.size_info, glyph_cache);\n }\n\n", "meta": {"hash_id": "490dbac953cf0c174cdebed878c4b2813294da5220a60dbca8b8f17160fdb1ad"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 67, "content": " /// Draw an indicator for the position of a line in history.\n #[inline(never)]\n fn draw_line_indicator(\n &mut self,\n config: &UiConfig,\n total_lines: usize,\n obstructed_column: Option,\n line: usize,\n ) {\n let columns = self.size_info.columns();\n let text = format!(\"[{}/{}]\", line, total_lines - 1);\n let column = Column(self.size_info.columns().saturating_sub(text.len()));\n let point = Point::new(0, column);\n\n", "meta": {"hash_id": "3808ecaf15da81a70e597a07f5c5da5ba4c17b1cf6f0dcfa2adf12f3bdbb475d"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 68, "content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, columns - 1);\n self.damage_tracker.frame().damage_line(damage);\n // Damage it on the next frame in case it goes away.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let colors = &config.colors;\n let fg = colors.line_indicator.foreground.unwrap_or(colors.primary.background);\n let bg = colors.line_indicator.background.unwrap_or(colors.primary.foreground);\n\n", "meta": {"hash_id": 
"5c1d0985bc1d7908fb5c317620576ac17fb3493eabd4d7d8fa3fefd35ab7ce9b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 69, "content": " // Do not render anything if it would obscure the vi mode cursor.\n if obstructed_column.map_or(true, |obstructed_column| obstructed_column < column) {\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, text.chars(), &self.size_info, glyph_cache);\n }\n }\n\n /// Returns `true` if damage information should be collected, `false` otherwise.\n #[inline]\n fn collect_damage(&self) -> bool {\n matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) || self.damage_tracker.debug\n }\n\n", "meta": {"hash_id": "2962a6868ce13297567593afea28482302fc693fe7e9f861f1a396aac68e724b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 70, "content": " /// Highlight damaged rects.\n ///\n /// This function is for debug purposes only.\n fn highlight_damage(&self, render_rects: &mut Vec) {\n for damage_rect in &self.damage_tracker.shape_frame_damage(self.size_info.into()) {\n let x = damage_rect.x as f32;\n let height = damage_rect.height as f32;\n let width = damage_rect.width as f32;\n let y = damage_y_to_viewport_y(&self.size_info, damage_rect) as f32;\n let render_rect = RenderRect::new(x, y, width, height, DAMAGE_RECT_COLOR, 0.5);\n\n", "meta": {"hash_id": "0f29cbbaec96d2dd0f1633b9ce7032e7f484e9c69d2639e8edd402f087889cbf"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 71, "content": " render_rects.push(render_rect);\n }\n }\n\n /// Request a new frame for a window on Wayland.\n fn request_frame(&mut self, scheduler: &mut Scheduler) {\n // Mark that we've used a frame.\n self.window.has_frame = false;\n\n // Get the display vblank interval.\n let monitor_vblank_interval = 1_000_000.\n / self\n .window\n .current_monitor()\n .and_then(|monitor| monitor.refresh_rate_millihertz())\n .unwrap_or(60_000) as f64;\n\n", "meta": {"hash_id": "cb8e18791a8d3f8818d32f820bb15fb163aa24833619cb3ad94e4d6e93bedbed"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 72, "content": " // Now convert it to micro seconds.\n let monitor_vblank_interval =\n Duration::from_micros((1000. 
* monitor_vblank_interval) as u64);\n\n let swap_timeout = self.frame_timer.compute_timeout(monitor_vblank_interval);\n\n let window_id = self.window.id();\n let timer_id = TimerId::new(Topic::Frame, window_id);\n let event = Event::new(EventType::Frame, window_id);\n\n scheduler.schedule(event, swap_timeout, false, timer_id);\n }\n}\n\n", "meta": {"hash_id": "30e322a3414ba5e54e2599830958a3907fe0b065035a8c9cb418bf4c1d72efcd"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 73, "content": "impl Drop for Display {\n fn drop(&mut self) {\n // Switch OpenGL context before dropping, otherwise objects (like programs) from other\n // contexts might be deleted when dropping renderer.\n self.make_current();\n unsafe {\n ManuallyDrop::drop(&mut self.renderer);\n ManuallyDrop::drop(&mut self.context);\n ManuallyDrop::drop(&mut self.surface);\n }\n }\n}\n\n", "meta": {"hash_id": "bddb13186a6094969454c24a3f01f270d1b6b0e11ab07064f85194819b7209c9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 74, "content": "/// Input method state.\n#[derive(Debug, Default)]\npub struct Ime {\n /// Whether the IME is enabled.\n enabled: bool,\n\n /// Current IME preedit.\n preedit: Option,\n}\n\nimpl Ime {\n #[inline]\n pub fn set_enabled(&mut self, is_enabled: bool) {\n if is_enabled {\n self.enabled = is_enabled\n } else {\n // Clear state when disabling IME.\n *self = Default::default();\n }\n }\n\n #[inline]\n pub fn is_enabled(&self) -> bool {\n self.enabled\n }\n\n #[inline]\n pub fn set_preedit(&mut self, preedit: Option) {\n self.preedit = preedit;\n }\n\n #[inline]\n pub fn preedit(&self) -> Option<&Preedit> {\n self.preedit.as_ref()\n }\n}\n\n#[derive(Debug, Default, PartialEq, Eq)]\npub struct Preedit {\n /// The preedit text.\n text: String,\n\n", "meta": {"hash_id": "3f9b2205ea82cbcbaa666a59e8888ccf1ec69f058ede785adacc2f44e5761c1a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 75, "content": " /// Byte offset for cursor start into the preedit text.\n ///\n /// `None` means that the cursor is invisible.\n cursor_byte_offset: Option,\n\n /// The cursor offset from the end of the preedit in char width.\n cursor_end_offset: Option,\n}\n\nimpl Preedit {\n pub fn new(text: String, cursor_byte_offset: Option) -> Self {\n let cursor_end_offset = if let Some(byte_offset) = cursor_byte_offset {\n // Convert byte offset into char offset.\n let cursor_end_offset =\n text[byte_offset..].chars().fold(0, |acc, ch| acc + ch.width().unwrap_or(1));\n\n", "meta": {"hash_id": "c1bdc732d49c892d75f5fd3a6d0ab64d0f23a87b1bf485ad1c252bd881d09ba2"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 76, "content": " Some(cursor_end_offset)\n } else {\n None\n };\n\n Self { text, cursor_byte_offset, cursor_end_offset }\n }\n}\n\n/// Pending renderer updates.\n///\n/// All renderer updates are cached to be applied just before rendering, to avoid platform-specific\n/// rendering issues.\n#[derive(Debug, Default, Copy, Clone)]\npub struct RendererUpdate {\n /// Should resize the window.\n resize: bool,\n\n /// Clear font caches.\n clear_font_cache: bool,\n}\n\n/// Struct for safe in-place replacement.\n///\n/// This struct allows easily replacing struct fields that provide `self -> Self` methods in-place,\n/// without having to deal with constantly unwrapping the underlying [`Option`].\nstruct Replaceable(Option);\n\n", "meta": {"hash_id": 
"91b7c3746a5eb87290de64f6be7afc6266d4e43ccc5cbec4eb0b4dfddbb6132f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 77, "content": "impl Replaceable {\n pub fn new(inner: T) -> Self {\n Self(Some(inner))\n }\n\n /// Replace the contents of the container.\n pub fn replace_with T>(&mut self, f: F) {\n self.0 = self.0.take().map(f);\n }\n\n /// Get immutable access to the wrapped value.\n pub fn get(&self) -> &T {\n self.0.as_ref().unwrap()\n }\n\n /// Get mutable access to the wrapped value.\n pub fn get_mut(&mut self) -> &mut T {\n self.0.as_mut().unwrap()\n }\n}\n\n", "meta": {"hash_id": "d3d4d8f8720963a367d8f5bf74244739d252412ce356f463bbe3ed09eb00f2de"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 78, "content": "impl Deref for Replaceable {\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n self.get()\n }\n}\n\nimpl DerefMut for Replaceable {\n fn deref_mut(&mut self) -> &mut Self::Target {\n self.get_mut()\n }\n}\n\n/// The frame timer state.\npub struct FrameTimer {\n /// Base timestamp used to compute sync points.\n base: Instant,\n\n /// The last timestamp we synced to.\n last_synced_timestamp: Instant,\n\n /// The refresh rate we've used to compute sync timestamps.\n refresh_interval: Duration,\n}\n\nimpl FrameTimer {\n pub fn new() -> Self {\n let now = Instant::now();\n Self { base: now, last_synced_timestamp: now, refresh_interval: Duration::ZERO }\n }\n\n", "meta": {"hash_id": "26064c14ae007ff16e77137660718d45e30767b2eaea74e0ab974f76a635a2b0"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 79, "content": " /// Compute the delay that we should use to achieve the target frame\n /// rate.\n pub fn compute_timeout(&mut self, refresh_interval: Duration) -> Duration {\n let now = Instant::now();\n\n // Handle refresh rate change.\n if self.refresh_interval != refresh_interval {\n self.base = now;\n self.last_synced_timestamp = now;\n self.refresh_interval = refresh_interval;\n return refresh_interval;\n }\n\n let next_frame = self.last_synced_timestamp + self.refresh_interval;\n\n", "meta": {"hash_id": "51cde94b51707ea3131697086f758f952d2245228eb81581b37f916f06f5824c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 80, "content": " if next_frame < now {\n // Redraw immediately if we haven't drawn in over `refresh_interval` microseconds.\n let elapsed_micros = (now - self.base).as_micros() as u64;\n let refresh_micros = self.refresh_interval.as_micros() as u64;\n self.last_synced_timestamp =\n now - Duration::from_micros(elapsed_micros % refresh_micros);\n Duration::ZERO\n } else {\n // Redraw on the next `refresh_interval` clock tick.\n self.last_synced_timestamp = next_frame;\n next_frame - now\n }\n }\n}\n\n", "meta": {"hash_id": "3b7b39e20e3e2536fde3cc1b209c02f9909c9eeedf7fde9dfdad330f1345359b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 81, "content": "/// Calculate the cell dimensions based on font metrics.\n///\n/// This will return a tuple of the cell width and height.\n#[inline]\nfn compute_cell_size(config: &UiConfig, metrics: &crossfont::Metrics) -> (f32, f32) {\n let offset_x = f64::from(config.font.offset.x);\n let offset_y = f64::from(config.font.offset.y);\n (\n (metrics.average_advance + offset_x).floor().max(1.) as f32,\n (metrics.line_height + offset_y).floor().max(1.) 
as f32,\n )\n}\n\n", "meta": {"hash_id": "e60e12c31ebbe6a66ef26ed43c604bb403db5158fc9be3f62f599f20819fe982"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 82, "content": "/// Calculate the size of the window given padding, terminal dimensions and cell size.\nfn window_size(\n config: &UiConfig,\n dimensions: Dimensions,\n cell_width: f32,\n cell_height: f32,\n scale_factor: f32,\n) -> PhysicalSize {\n let padding = config.window.padding(scale_factor);\n\n let grid_width = cell_width * dimensions.columns.max(MIN_COLUMNS) as f32;\n let grid_height = cell_height * dimensions.lines.max(MIN_SCREEN_LINES) as f32;\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n PhysicalSize::new(width as u32, height as u32)\n}\n", "meta": {"hash_id": "e522ae0b3b63eb29c51c18a5828f1c0bd1bc3facf9bd57c05e8128ab5abd7a1c"}}]}], "golden_chunks": [{"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 14, "content": "/// The display wraps a window, font rasterizer, and GPU renderer.\npub struct Display {\n pub window: Window,\n\n pub size_info: SizeInfo,\n\n /// Hint highlighted by the mouse.\n pub highlighted_hint: Option,\n\n /// Hint highlighted by the vi mode cursor.\n pub vi_highlighted_hint: Option,\n\n pub raw_window_handle: RawWindowHandle,\n\n /// UI cursor visibility for blinking.\n pub cursor_hidden: bool,\n\n pub visual_bell: VisualBell,\n\n /// Mapped RGB values for each terminal color.\n pub colors: List,\n\n /// State of the keyboard hints.\n pub hint_state: HintState,\n\n /// Unprocessed display updates.\n pub pending_update: DisplayUpdate,\n\n /// The renderer update that takes place only once before the actual rendering.\n pub pending_renderer_update: Option,\n\n", "meta": {"hash_id": "3811d9736c47f355a8ad4b929c285e77eb099350e68420142636952a39f2223e"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/display/mod.rs", "repo_name": "alacritty/alacritty", "num_chunks": 83, "source": "codebases/jsonl"}} +{"query": "How does the Display struct handle configuration updates?", "answer": "The Display struct has an `update_config` method that takes a `&UiConfig`. This method updates the `damage_tracker.debug` field based on the `config.debug.highlight_damage` setting, updates the `visual_bell` based on the `config.bell` setting, and updates the `colors` field based on the `config.colors` setting.", "golden_doc_uuids": ["96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b"], "golden_chunk_uuids": [["96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", 49]], "golden_documents": [{"uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "content": "//! The display subsystem including window management, font rasterization, and\n//! 
GPU drawing.\n\nuse std::cmp;\nuse std::fmt::{self, Formatter};\nuse std::mem::{self, ManuallyDrop};\nuse std::num::NonZeroU32;\nuse std::ops::{Deref, DerefMut};\nuse std::time::{Duration, Instant};\n\nuse glutin::context::{NotCurrentContext, PossiblyCurrentContext};\nuse glutin::prelude::*;\nuse glutin::surface::{Surface, SwapInterval, WindowSurface};\n\nuse log::{debug, info};\nuse parking_lot::MutexGuard;\nuse raw_window_handle::RawWindowHandle;\nuse serde::{Deserialize, Serialize};\nuse winit::dpi::PhysicalSize;\nuse winit::keyboard::ModifiersState;\nuse winit::window::CursorIcon;\n\nuse crossfont::{Rasterize, Rasterizer, Size as FontSize};\nuse unicode_width::UnicodeWidthChar;\n\nuse alacritty_terminal::event::{EventListener, OnResize, WindowSize};\nuse alacritty_terminal::grid::Dimensions as TermDimensions;\nuse alacritty_terminal::index::{Column, Direction, Line, Point};\nuse alacritty_terminal::selection::Selection;\nuse alacritty_terminal::term::cell::Flags;\nuse alacritty_terminal::term::{\n self, point_to_viewport, LineDamageBounds, Term, TermDamage, TermMode, MIN_COLUMNS,\n MIN_SCREEN_LINES,\n};\nuse alacritty_terminal::vte::ansi::{CursorShape, NamedColor};\n\nuse crate::config::font::Font;\nuse crate::config::window::Dimensions;\n#[cfg(not(windows))]\nuse crate::config::window::StartupMode;\nuse crate::config::UiConfig;\nuse crate::display::bell::VisualBell;\nuse crate::display::color::{List, Rgb};\nuse crate::display::content::{RenderableContent, RenderableCursor};\nuse crate::display::cursor::IntoRects;\nuse crate::display::damage::{damage_y_to_viewport_y, DamageTracker};\nuse crate::display::hint::{HintMatch, HintState};\nuse crate::display::meter::Meter;\nuse crate::display::window::Window;\nuse crate::event::{Event, EventType, Mouse, SearchState};\nuse crate::message_bar::{MessageBuffer, MessageType};\nuse crate::renderer::rects::{RenderLine, RenderLines, RenderRect};\nuse crate::renderer::{self, GlyphCache, Renderer};\nuse crate::scheduler::{Scheduler, TimerId, Topic};\nuse crate::string::{ShortenDirection, StrShortener};\n\npub mod color;\npub mod content;\npub mod cursor;\npub mod hint;\npub mod window;\n\nmod bell;\nmod damage;\nmod meter;\n\n/// Label for the forward terminal search bar.\nconst FORWARD_SEARCH_LABEL: &str = \"Search: \";\n\n/// Label for the backward terminal search bar.\nconst BACKWARD_SEARCH_LABEL: &str = \"Backward Search: \";\n\n/// The character used to shorten the visible text like uri preview or search regex.\nconst SHORTENER: char = '…';\n\n/// Color which is used to highlight damaged rects when debugging.\nconst DAMAGE_RECT_COLOR: Rgb = Rgb::new(255, 0, 255);\n\n#[derive(Debug)]\npub enum Error {\n /// Error with window management.\n Window(window::Error),\n\n /// Error dealing with fonts.\n Font(crossfont::Error),\n\n /// Error in renderer.\n Render(renderer::Error),\n\n /// Error during context operations.\n Context(glutin::error::Error),\n}\n\nimpl std::error::Error for Error {\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n match self {\n Error::Window(err) => err.source(),\n Error::Font(err) => err.source(),\n Error::Render(err) => err.source(),\n Error::Context(err) => err.source(),\n }\n }\n}\n\nimpl fmt::Display for Error {\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n match self {\n Error::Window(err) => err.fmt(f),\n Error::Font(err) => err.fmt(f),\n Error::Render(err) => err.fmt(f),\n Error::Context(err) => err.fmt(f),\n }\n }\n}\n\nimpl From for Error {\n fn from(val: window::Error) -> Self {\n 
Error::Window(val)\n }\n}\n\nimpl From for Error {\n fn from(val: crossfont::Error) -> Self {\n Error::Font(val)\n }\n}\n\nimpl From for Error {\n fn from(val: renderer::Error) -> Self {\n Error::Render(val)\n }\n}\n\nimpl From for Error {\n fn from(val: glutin::error::Error) -> Self {\n Error::Context(val)\n }\n}\n\n/// Terminal size info.\n#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]\npub struct SizeInfo {\n /// Terminal window width.\n width: T,\n\n /// Terminal window height.\n height: T,\n\n /// Width of individual cell.\n cell_width: T,\n\n /// Height of individual cell.\n cell_height: T,\n\n /// Horizontal window padding.\n padding_x: T,\n\n /// Vertical window padding.\n padding_y: T,\n\n /// Number of lines in the viewport.\n screen_lines: usize,\n\n /// Number of columns in the viewport.\n columns: usize,\n}\n\nimpl From> for SizeInfo {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n width: size_info.width as u32,\n height: size_info.height as u32,\n cell_width: size_info.cell_width as u32,\n cell_height: size_info.cell_height as u32,\n padding_x: size_info.padding_x as u32,\n padding_y: size_info.padding_y as u32,\n screen_lines: size_info.screen_lines,\n columns: size_info.screen_lines,\n }\n }\n}\n\nimpl From> for WindowSize {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n num_cols: size_info.columns() as u16,\n num_lines: size_info.screen_lines() as u16,\n cell_width: size_info.cell_width() as u16,\n cell_height: size_info.cell_height() as u16,\n }\n }\n}\n\nimpl SizeInfo {\n #[inline]\n pub fn width(&self) -> T {\n self.width\n }\n\n #[inline]\n pub fn height(&self) -> T {\n self.height\n }\n\n #[inline]\n pub fn cell_width(&self) -> T {\n self.cell_width\n }\n\n #[inline]\n pub fn cell_height(&self) -> T {\n self.cell_height\n }\n\n #[inline]\n pub fn padding_x(&self) -> T {\n self.padding_x\n }\n\n #[inline]\n pub fn padding_y(&self) -> T {\n self.padding_y\n }\n}\n\nimpl SizeInfo {\n #[allow(clippy::too_many_arguments)]\n pub fn new(\n width: f32,\n height: f32,\n cell_width: f32,\n cell_height: f32,\n mut padding_x: f32,\n mut padding_y: f32,\n dynamic_padding: bool,\n ) -> SizeInfo {\n if dynamic_padding {\n padding_x = Self::dynamic_padding(padding_x.floor(), width, cell_width);\n padding_y = Self::dynamic_padding(padding_y.floor(), height, cell_height);\n }\n\n let lines = (height - 2. * padding_y) / cell_height;\n let screen_lines = cmp::max(lines as usize, MIN_SCREEN_LINES);\n\n let columns = (width - 2. * padding_x) / cell_width;\n let columns = cmp::max(columns as usize, MIN_COLUMNS);\n\n SizeInfo {\n width,\n height,\n cell_width,\n cell_height,\n padding_x: padding_x.floor(),\n padding_y: padding_y.floor(),\n screen_lines,\n columns,\n }\n }\n\n #[inline]\n pub fn reserve_lines(&mut self, count: usize) {\n self.screen_lines = cmp::max(self.screen_lines.saturating_sub(count), MIN_SCREEN_LINES);\n }\n\n /// Check if coordinates are inside the terminal grid.\n ///\n /// The padding, message bar or search are not counted as part of the grid.\n #[inline]\n pub fn contains_point(&self, x: usize, y: usize) -> bool {\n x <= (self.padding_x + self.columns as f32 * self.cell_width) as usize\n && x > self.padding_x as usize\n && y <= (self.padding_y + self.screen_lines as f32 * self.cell_height) as usize\n && y > self.padding_y as usize\n }\n\n /// Calculate padding to spread it evenly around the terminal content.\n #[inline]\n fn dynamic_padding(padding: f32, dimension: f32, cell_dimension: f32) -> f32 {\n padding + ((dimension - 2. 
* padding) % cell_dimension) / 2.\n }\n}\n\nimpl TermDimensions for SizeInfo {\n #[inline]\n fn columns(&self) -> usize {\n self.columns\n }\n\n #[inline]\n fn screen_lines(&self) -> usize {\n self.screen_lines\n }\n\n #[inline]\n fn total_lines(&self) -> usize {\n self.screen_lines()\n }\n}\n\n#[derive(Default, Clone, Debug, PartialEq, Eq)]\npub struct DisplayUpdate {\n pub dirty: bool,\n\n dimensions: Option>,\n cursor_dirty: bool,\n font: Option,\n}\n\nimpl DisplayUpdate {\n pub fn dimensions(&self) -> Option> {\n self.dimensions\n }\n\n pub fn font(&self) -> Option<&Font> {\n self.font.as_ref()\n }\n\n pub fn cursor_dirty(&self) -> bool {\n self.cursor_dirty\n }\n\n pub fn set_dimensions(&mut self, dimensions: PhysicalSize) {\n self.dimensions = Some(dimensions);\n self.dirty = true;\n }\n\n pub fn set_font(&mut self, font: Font) {\n self.font = Some(font);\n self.dirty = true;\n }\n\n pub fn set_cursor_dirty(&mut self) {\n self.cursor_dirty = true;\n self.dirty = true;\n }\n}\n\n/// The display wraps a window, font rasterizer, and GPU renderer.\npub struct Display {\n pub window: Window,\n\n pub size_info: SizeInfo,\n\n /// Hint highlighted by the mouse.\n pub highlighted_hint: Option,\n\n /// Hint highlighted by the vi mode cursor.\n pub vi_highlighted_hint: Option,\n\n pub raw_window_handle: RawWindowHandle,\n\n /// UI cursor visibility for blinking.\n pub cursor_hidden: bool,\n\n pub visual_bell: VisualBell,\n\n /// Mapped RGB values for each terminal color.\n pub colors: List,\n\n /// State of the keyboard hints.\n pub hint_state: HintState,\n\n /// Unprocessed display updates.\n pub pending_update: DisplayUpdate,\n\n /// The renderer update that takes place only once before the actual rendering.\n pub pending_renderer_update: Option,\n\n /// The ime on the given display.\n pub ime: Ime,\n\n /// The state of the timer for frame scheduling.\n pub frame_timer: FrameTimer,\n\n /// Damage tracker for the given display.\n pub damage_tracker: DamageTracker,\n\n /// Font size used by the window.\n pub font_size: FontSize,\n\n // Mouse point position when highlighting hints.\n hint_mouse_point: Option,\n\n renderer: ManuallyDrop,\n\n surface: ManuallyDrop>,\n\n context: ManuallyDrop>,\n\n glyph_cache: GlyphCache,\n meter: Meter,\n}\n\nimpl Display {\n pub fn new(\n window: Window,\n gl_context: NotCurrentContext,\n config: &UiConfig,\n _tabbed: bool,\n ) -> Result {\n let raw_window_handle = window.raw_window_handle();\n\n let scale_factor = window.scale_factor as f32;\n let rasterizer = Rasterizer::new()?;\n\n let font_size = config.font.size().scale(scale_factor);\n debug!(\"Loading \\\"{}\\\" font\", &config.font.normal().family);\n let font = config.font.clone().with_size(font_size);\n let mut glyph_cache = GlyphCache::new(rasterizer, &font)?;\n\n let metrics = glyph_cache.font_metrics();\n let (cell_width, cell_height) = compute_cell_size(config, &metrics);\n\n // Resize the window to account for the user configured size.\n if let Some(dimensions) = config.window.dimensions() {\n let size = window_size(config, dimensions, cell_width, cell_height, scale_factor);\n window.request_inner_size(size);\n }\n\n // Create the GL surface to draw into.\n let surface = renderer::platform::create_gl_surface(\n &gl_context,\n window.inner_size(),\n window.raw_window_handle(),\n )?;\n\n // Make the context current.\n let context = gl_context.make_current(&surface)?;\n\n // Create renderer.\n let mut renderer = Renderer::new(&context, config.debug.renderer)?;\n\n // Load font common glyphs to 
accelerate rendering.\n debug!(\"Filling glyph cache with common glyphs\");\n renderer.with_loader(|mut api| {\n glyph_cache.reset_glyph_cache(&mut api);\n });\n\n let padding = config.window.padding(window.scale_factor as f32);\n let viewport_size = window.inner_size();\n\n // Create new size with at least one column and row.\n let size_info = SizeInfo::new(\n viewport_size.width as f32,\n viewport_size.height as f32,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding && config.window.dimensions().is_none(),\n );\n\n info!(\"Cell size: {} x {}\", cell_width, cell_height);\n info!(\"Padding: {} x {}\", size_info.padding_x(), size_info.padding_y());\n info!(\"Width: {}, Height: {}\", size_info.width(), size_info.height());\n\n // Update OpenGL projection.\n renderer.resize(&size_info);\n\n // Clear screen.\n let background_color = config.colors.primary.background;\n renderer.clear(background_color, config.window_opacity());\n\n // Disable shadows for transparent windows on macOS.\n #[cfg(target_os = \"macos\")]\n window.set_has_shadow(config.window_opacity() >= 1.0);\n\n let is_wayland = matches!(raw_window_handle, RawWindowHandle::Wayland(_));\n\n // On Wayland we can safely ignore this call, since the window isn't visible until you\n // actually draw something into it and commit those changes.\n if !is_wayland {\n surface.swap_buffers(&context).expect(\"failed to swap buffers.\");\n renderer.finish();\n }\n\n // Set resize increments for the newly created window.\n if config.window.resize_increments {\n window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n window.set_visible(true);\n\n #[allow(clippy::single_match)]\n #[cfg(not(windows))]\n if !_tabbed {\n match config.window.startup_mode {\n #[cfg(target_os = \"macos\")]\n StartupMode::SimpleFullscreen => window.set_simple_fullscreen(true),\n StartupMode::Maximized if !is_wayland => window.set_maximized(true),\n _ => (),\n }\n }\n\n let hint_state = HintState::new(config.hints.alphabet());\n\n let mut damage_tracker = DamageTracker::new(size_info.screen_lines(), size_info.columns());\n damage_tracker.debug = config.debug.highlight_damage;\n\n // Disable vsync.\n if let Err(err) = surface.set_swap_interval(&context, SwapInterval::DontWait) {\n info!(\"Failed to disable vsync: {}\", err);\n }\n\n Ok(Self {\n context: ManuallyDrop::new(Replaceable::new(context)),\n visual_bell: VisualBell::from(&config.bell),\n renderer: ManuallyDrop::new(renderer),\n surface: ManuallyDrop::new(surface),\n colors: List::from(&config.colors),\n frame_timer: FrameTimer::new(),\n raw_window_handle,\n damage_tracker,\n glyph_cache,\n hint_state,\n size_info,\n font_size,\n window,\n pending_renderer_update: Default::default(),\n vi_highlighted_hint: Default::default(),\n highlighted_hint: Default::default(),\n hint_mouse_point: Default::default(),\n pending_update: Default::default(),\n cursor_hidden: Default::default(),\n meter: Default::default(),\n ime: Default::default(),\n })\n }\n\n #[inline]\n pub fn gl_context(&self) -> &PossiblyCurrentContext {\n self.context.get()\n }\n\n pub fn make_not_current(&mut self) {\n if self.context.get().is_current() {\n self.context.replace_with(|context| {\n context\n .make_not_current()\n .expect(\"failed to disable context\")\n .treat_as_possibly_current()\n });\n }\n }\n\n pub fn make_current(&self) {\n if !self.context.get().is_current() {\n self.context.make_current(&self.surface).expect(\"failed to make context current\")\n }\n }\n\n fn 
swap_buffers(&self) {\n #[allow(clippy::single_match)]\n let res = match (self.surface.deref(), &self.context.get()) {\n #[cfg(not(any(target_os = \"macos\", windows)))]\n (Surface::Egl(surface), PossiblyCurrentContext::Egl(context))\n if matches!(self.raw_window_handle, RawWindowHandle::Wayland(_))\n && !self.damage_tracker.debug =>\n {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n surface.swap_buffers_with_damage(context, &damage)\n },\n (surface, context) => surface.swap_buffers(context),\n };\n if let Err(err) = res {\n debug!(\"error calling swap_buffers: {}\", err);\n }\n }\n\n /// Update font size and cell dimensions.\n ///\n /// This will return a tuple of the cell width and height.\n fn update_font_size(\n glyph_cache: &mut GlyphCache,\n config: &UiConfig,\n font: &Font,\n ) -> (f32, f32) {\n let _ = glyph_cache.update_font_size(font);\n\n // Compute new cell sizes.\n compute_cell_size(config, &glyph_cache.font_metrics())\n }\n\n /// Reset glyph cache.\n fn reset_glyph_cache(&mut self) {\n let cache = &mut self.glyph_cache;\n self.renderer.with_loader(|mut api| {\n cache.reset_glyph_cache(&mut api);\n });\n }\n\n // XXX: this function must not call to any `OpenGL` related tasks. Renderer updates are\n // performed in [`Self::process_renderer_update`] right before drawing.\n //\n /// Process update events.\n pub fn handle_update(\n &mut self,\n terminal: &mut Term,\n pty_resize_handle: &mut dyn OnResize,\n message_buffer: &MessageBuffer,\n search_state: &mut SearchState,\n config: &UiConfig,\n ) where\n T: EventListener,\n {\n let pending_update = mem::take(&mut self.pending_update);\n\n let (mut cell_width, mut cell_height) =\n (self.size_info.cell_width(), self.size_info.cell_height());\n\n if pending_update.font().is_some() || pending_update.cursor_dirty() {\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.clear_font_cache = true\n }\n\n // Update font size and cell dimensions.\n if let Some(font) = pending_update.font() {\n let cell_dimensions = Self::update_font_size(&mut self.glyph_cache, config, font);\n cell_width = cell_dimensions.0;\n cell_height = cell_dimensions.1;\n\n info!(\"Cell size: {} x {}\", cell_width, cell_height);\n\n // Mark entire terminal as damaged since glyph size could change without cell size\n // changes.\n self.damage_tracker.frame().mark_fully_damaged();\n }\n\n let (mut width, mut height) = (self.size_info.width(), self.size_info.height());\n if let Some(dimensions) = pending_update.dimensions() {\n width = dimensions.width as f32;\n height = dimensions.height as f32;\n }\n\n let padding = config.window.padding(self.window.scale_factor as f32);\n\n let mut new_size = SizeInfo::new(\n width,\n height,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding,\n );\n\n // Update number of column/lines in the viewport.\n let search_active = search_state.history_index.is_some();\n let message_bar_lines = message_buffer.message().map_or(0, |m| m.text(&new_size).len());\n let search_lines = usize::from(search_active);\n new_size.reserve_lines(message_bar_lines + search_lines);\n\n // Update resize increments.\n if config.window.resize_increments {\n self.window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n // Resize when terminal when its dimensions have changed.\n if self.size_info.screen_lines() != new_size.screen_lines\n || self.size_info.columns() != new_size.columns()\n {\n // Resize PTY.\n 
pty_resize_handle.on_resize(new_size.into());\n\n // Resize terminal.\n terminal.resize(new_size);\n\n // Resize damage tracking.\n self.damage_tracker.resize(new_size.screen_lines(), new_size.columns());\n }\n\n // Check if dimensions have changed.\n if new_size != self.size_info {\n // Queue renderer update.\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.resize = true;\n\n // Clear focused search match.\n search_state.clear_focused_match();\n }\n self.size_info = new_size;\n }\n\n // NOTE: Renderer updates are split off, since platforms like Wayland require resize and other\n // OpenGL operations to be performed right before rendering. Otherwise they could lock the\n // back buffer and render with the previous state. This also solves flickering during resizes.\n //\n /// Update the state of the renderer.\n pub fn process_renderer_update(&mut self) {\n let renderer_update = match self.pending_renderer_update.take() {\n Some(renderer_update) => renderer_update,\n _ => return,\n };\n\n // Resize renderer.\n if renderer_update.resize {\n let width = NonZeroU32::new(self.size_info.width() as u32).unwrap();\n let height = NonZeroU32::new(self.size_info.height() as u32).unwrap();\n self.surface.resize(&self.context, width, height);\n }\n\n // Ensure we're modifying the correct OpenGL context.\n self.make_current();\n\n if renderer_update.clear_font_cache {\n self.reset_glyph_cache();\n }\n\n self.renderer.resize(&self.size_info);\n\n info!(\"Padding: {} x {}\", self.size_info.padding_x(), self.size_info.padding_y());\n info!(\"Width: {}, Height: {}\", self.size_info.width(), self.size_info.height());\n }\n\n /// Draw the screen.\n ///\n /// A reference to Term whose state is being drawn must be provided.\n ///\n /// This call may block if vsync is enabled.\n pub fn draw(\n &mut self,\n mut terminal: MutexGuard<'_, Term>,\n scheduler: &mut Scheduler,\n message_buffer: &MessageBuffer,\n config: &UiConfig,\n search_state: &mut SearchState,\n ) {\n // Collect renderable content before the terminal is dropped.\n let mut content = RenderableContent::new(config, self, &terminal, search_state);\n let mut grid_cells = Vec::new();\n for cell in &mut content {\n grid_cells.push(cell);\n }\n let selection_range = content.selection_range();\n let foreground_color = content.color(NamedColor::Foreground as usize);\n let background_color = content.color(NamedColor::Background as usize);\n let display_offset = content.display_offset();\n let cursor = content.cursor();\n\n let cursor_point = terminal.grid().cursor.point;\n let total_lines = terminal.grid().total_lines();\n let metrics = self.glyph_cache.font_metrics();\n let size_info = self.size_info;\n\n let vi_mode = terminal.mode().contains(TermMode::VI);\n let vi_cursor_point = if vi_mode { Some(terminal.vi_mode_cursor.point) } else { None };\n\n // Add damage from the terminal.\n if self.collect_damage() {\n match terminal.damage() {\n TermDamage::Full => self.damage_tracker.frame().mark_fully_damaged(),\n TermDamage::Partial(damaged_lines) => {\n for damage in damaged_lines {\n self.damage_tracker.frame().damage_line(damage);\n }\n },\n }\n terminal.reset_damage();\n }\n\n // Drop terminal as early as possible to free lock.\n drop(terminal);\n\n // Add damage from alacritty's UI elements overlapping terminal.\n if self.collect_damage() {\n let requires_full_damage = self.visual_bell.intensity() != 0.\n || self.hint_state.active()\n || search_state.regex().is_some();\n\n if requires_full_damage {\n 
self.damage_tracker.frame().mark_fully_damaged();\n self.damage_tracker.next_frame().mark_fully_damaged();\n }\n\n let vi_cursor_viewport_point =\n vi_cursor_point.and_then(|cursor| point_to_viewport(display_offset, cursor));\n\n self.damage_tracker.damage_vi_cursor(vi_cursor_viewport_point);\n self.damage_tracker.damage_selection(selection_range, display_offset);\n }\n\n // Make sure this window's OpenGL context is active.\n self.make_current();\n\n self.renderer.clear(background_color, config.window_opacity());\n let mut lines = RenderLines::new();\n\n // Optimize loop hint comparator.\n let has_highlighted_hint =\n self.highlighted_hint.is_some() || self.vi_highlighted_hint.is_some();\n\n // Draw grid.\n {\n let _sampler = self.meter.sampler();\n\n // Ensure macOS hasn't reset our viewport.\n #[cfg(target_os = \"macos\")]\n self.renderer.set_viewport(&size_info);\n\n let glyph_cache = &mut self.glyph_cache;\n let highlighted_hint = &self.highlighted_hint;\n let vi_highlighted_hint = &self.vi_highlighted_hint;\n let damage_tracker = &mut self.damage_tracker;\n\n self.renderer.draw_cells(\n &size_info,\n glyph_cache,\n grid_cells.into_iter().map(|mut cell| {\n // Underline hints hovered by mouse or vi mode cursor.\n let point = term::viewport_to_point(display_offset, cell.point);\n\n if has_highlighted_hint {\n let hyperlink =\n cell.extra.as_ref().and_then(|extra| extra.hyperlink.as_ref());\n if highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n || vi_highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n {\n cell.flags.insert(Flags::UNDERLINE);\n // Damage hints for the current and next frames.\n damage_tracker.frame().damage_point(cell.point);\n damage_tracker.next_frame().damage_point(cell.point);\n }\n }\n\n // Update underline/strikeout.\n lines.update(&cell);\n\n cell\n }),\n );\n }\n\n let mut rects = lines.rects(&metrics, &size_info);\n\n if let Some(vi_cursor_point) = vi_cursor_point {\n // Indicate vi mode by showing the cursor's position in the top right corner.\n let line = (-vi_cursor_point.line.0 + size_info.bottommost_line().0) as usize;\n let obstructed_column = Some(vi_cursor_point)\n .filter(|point| point.line == -(display_offset as i32))\n .map(|point| point.column);\n self.draw_line_indicator(config, total_lines, obstructed_column, line);\n } else if search_state.regex().is_some() {\n // Show current display offset in vi-less search to indicate match position.\n self.draw_line_indicator(config, total_lines, None, display_offset);\n };\n\n // Draw cursor.\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n\n // Push visual bell after url/underline/strikeout rects.\n let visual_bell_intensity = self.visual_bell.intensity();\n if visual_bell_intensity != 0. 
{\n let visual_bell_rect = RenderRect::new(\n 0.,\n 0.,\n size_info.width(),\n size_info.height(),\n config.bell.color,\n visual_bell_intensity as f32,\n );\n rects.push(visual_bell_rect);\n }\n\n // Handle IME positioning and search bar rendering.\n let ime_position = match search_state.regex() {\n Some(regex) => {\n let search_label = match search_state.direction() {\n Direction::Right => FORWARD_SEARCH_LABEL,\n Direction::Left => BACKWARD_SEARCH_LABEL,\n };\n\n let search_text = Self::format_search(regex, search_label, size_info.columns());\n\n // Render the search bar.\n self.draw_search(config, &search_text);\n\n // Draw search bar cursor.\n let line = size_info.screen_lines();\n let column = Column(search_text.chars().count() - 1);\n\n // Add cursor to search bar if IME is not active.\n if self.ime.preedit().is_none() {\n let fg = config.colors.footer_bar_foreground();\n let shape = CursorShape::Underline;\n let cursor = RenderableCursor::new(Point::new(line, column), shape, fg, false);\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n }\n\n Some(Point::new(line, column))\n },\n None => {\n let num_lines = self.size_info.screen_lines();\n term::point_to_viewport(display_offset, cursor_point)\n .filter(|point| point.line < num_lines)\n },\n };\n\n // Handle IME.\n if self.ime.is_enabled() {\n if let Some(point) = ime_position {\n let (fg, bg) = if search_state.regex().is_some() {\n (config.colors.footer_bar_foreground(), config.colors.footer_bar_background())\n } else {\n (foreground_color, background_color)\n };\n\n self.draw_ime_preview(point, fg, bg, &mut rects, config);\n }\n }\n\n if let Some(message) = message_buffer.message() {\n let search_offset = usize::from(search_state.regex().is_some());\n let text = message.text(&size_info);\n\n // Create a new rectangle for the background.\n let start_line = size_info.screen_lines() + search_offset;\n let y = size_info.cell_height().mul_add(start_line as f32, size_info.padding_y());\n\n let bg = match message.ty() {\n MessageType::Error => config.colors.normal.red,\n MessageType::Warning => config.colors.normal.yellow,\n };\n\n let x = 0;\n let width = size_info.width() as i32;\n let height = (size_info.height() - y) as i32;\n let message_bar_rect =\n RenderRect::new(x as f32, y, width as f32, height as f32, bg, 1.);\n\n // Push message_bar in the end, so it'll be above all other content.\n rects.push(message_bar_rect);\n\n // Always damage message bar, since it could have messages of the same size in it.\n self.damage_tracker.frame().add_viewport_rect(&size_info, x, y as i32, width, height);\n\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n\n // Relay messages to the user.\n let glyph_cache = &mut self.glyph_cache;\n let fg = config.colors.primary.background;\n for (i, message_text) in text.iter().enumerate() {\n let point = Point::new(start_line + i, Column(0));\n self.renderer.draw_string(\n point,\n fg,\n bg,\n message_text.chars(),\n &size_info,\n glyph_cache,\n );\n }\n } else {\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n }\n\n self.draw_render_timer(config);\n\n // Draw hyperlink uri preview.\n if has_highlighted_hint {\n let cursor_point = vi_cursor_point.or(Some(cursor_point));\n self.draw_hyperlink_preview(config, cursor_point, display_offset);\n }\n\n // Notify winit that we're about to present.\n self.window.pre_present_notify();\n\n // Highlight damage for debugging.\n if self.damage_tracker.debug {\n let damage = 
self.damage_tracker.shape_frame_damage(self.size_info.into());\n let mut rects = Vec::with_capacity(damage.len());\n self.highlight_damage(&mut rects);\n self.renderer.draw_rects(&self.size_info, &metrics, rects);\n }\n\n // Clearing debug highlights from the previous frame requires full redraw.\n self.swap_buffers();\n\n if matches!(self.raw_window_handle, RawWindowHandle::Xcb(_) | RawWindowHandle::Xlib(_)) {\n // On X11 `swap_buffers` does not block for vsync. However the next OpenGl command\n // will block to synchronize (this is `glClear` in Alacritty), which causes a\n // permanent one frame delay.\n self.renderer.finish();\n }\n\n // XXX: Request the new frame after swapping buffers, so the\n // time to finish OpenGL operations is accounted for in the timeout.\n if !matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) {\n self.request_frame(scheduler);\n }\n\n self.damage_tracker.swap_damage();\n }\n\n /// Update to a new configuration.\n pub fn update_config(&mut self, config: &UiConfig) {\n self.damage_tracker.debug = config.debug.highlight_damage;\n self.visual_bell.update_config(&config.bell);\n self.colors = List::from(&config.colors);\n }\n\n /// Update the mouse/vi mode cursor hint highlighting.\n ///\n /// This will return whether the highlighted hints changed.\n pub fn update_highlighted_hints(\n &mut self,\n term: &Term,\n config: &UiConfig,\n mouse: &Mouse,\n modifiers: ModifiersState,\n ) -> bool {\n // Update vi mode cursor hint.\n let vi_highlighted_hint = if term.mode().contains(TermMode::VI) {\n let mods = ModifiersState::all();\n let point = term.vi_mode_cursor.point;\n hint::highlighted_at(term, config, point, mods)\n } else {\n None\n };\n let mut dirty = vi_highlighted_hint != self.vi_highlighted_hint;\n self.vi_highlighted_hint = vi_highlighted_hint;\n\n // Abort if mouse highlighting conditions are not met.\n if !mouse.inside_text_area || !term.selection.as_ref().map_or(true, Selection::is_empty) {\n dirty |= self.highlighted_hint.is_some();\n self.highlighted_hint = None;\n return dirty;\n }\n\n // Find highlighted hint at mouse position.\n let point = mouse.point(&self.size_info, term.grid().display_offset());\n let highlighted_hint = hint::highlighted_at(term, config, point, modifiers);\n\n // Update cursor shape.\n if highlighted_hint.is_some() {\n // If mouse changed the line, we should update the hyperlink preview, since the\n // highlighted hint could be disrupted by the old preview.\n dirty = self.hint_mouse_point.map_or(false, |p| p.line != point.line);\n self.hint_mouse_point = Some(point);\n self.window.set_mouse_cursor(CursorIcon::Pointer);\n } else if self.highlighted_hint.is_some() {\n self.hint_mouse_point = None;\n if term.mode().intersects(TermMode::MOUSE_MODE) && !term.mode().contains(TermMode::VI) {\n self.window.set_mouse_cursor(CursorIcon::Default);\n } else {\n self.window.set_mouse_cursor(CursorIcon::Text);\n }\n }\n\n dirty |= self.highlighted_hint != highlighted_hint;\n self.highlighted_hint = highlighted_hint;\n\n dirty\n }\n\n #[inline(never)]\n fn draw_ime_preview(\n &mut self,\n point: Point,\n fg: Rgb,\n bg: Rgb,\n rects: &mut Vec,\n config: &UiConfig,\n ) {\n let preedit = match self.ime.preedit() {\n Some(preedit) => preedit,\n None => {\n // In case we don't have preedit, just set the popup point.\n self.window.update_ime_position(point, &self.size_info);\n return;\n },\n };\n\n let num_cols = self.size_info.columns();\n\n // Get the visible preedit.\n let visible_text: String = match (preedit.cursor_byte_offset, 
preedit.cursor_end_offset) {\n (Some(byte_offset), Some(end_offset)) if end_offset > num_cols => StrShortener::new(\n &preedit.text[byte_offset..],\n num_cols,\n ShortenDirection::Right,\n Some(SHORTENER),\n ),\n _ => {\n StrShortener::new(&preedit.text, num_cols, ShortenDirection::Left, Some(SHORTENER))\n },\n }\n .collect();\n\n let visible_len = visible_text.chars().count();\n\n let end = cmp::min(point.column.0 + visible_len, num_cols);\n let start = end.saturating_sub(visible_len);\n\n let start = Point::new(point.line, Column(start));\n let end = Point::new(point.line, Column(end - 1));\n\n let glyph_cache = &mut self.glyph_cache;\n let metrics = glyph_cache.font_metrics();\n\n self.renderer.draw_string(\n start,\n fg,\n bg,\n visible_text.chars(),\n &self.size_info,\n glyph_cache,\n );\n\n // Damage preedit inside the terminal viewport.\n if self.collect_damage() && point.line < self.size_info.screen_lines() {\n let damage = LineDamageBounds::new(start.line, 0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n // Add underline for preedit text.\n let underline = RenderLine { start, end, color: fg };\n rects.extend(underline.rects(Flags::UNDERLINE, &metrics, &self.size_info));\n\n let ime_popup_point = match preedit.cursor_end_offset {\n Some(cursor_end_offset) if cursor_end_offset != 0 => {\n let is_wide = preedit.text[preedit.cursor_byte_offset.unwrap_or_default()..]\n .chars()\n .next()\n .map(|ch| ch.width() == Some(2))\n .unwrap_or_default();\n\n let cursor_column = Column(\n (end.column.0 as isize - cursor_end_offset as isize + 1).max(0) as usize,\n );\n let cursor_point = Point::new(point.line, cursor_column);\n let cursor =\n RenderableCursor::new(cursor_point, CursorShape::HollowBlock, fg, is_wide);\n rects.extend(cursor.rects(&self.size_info, config.cursor.thickness()));\n cursor_point\n },\n _ => end,\n };\n\n self.window.update_ime_position(ime_popup_point, &self.size_info);\n }\n\n /// Format search regex to account for the cursor and fullwidth characters.\n fn format_search(search_regex: &str, search_label: &str, max_width: usize) -> String {\n let label_len = search_label.len();\n\n // Skip `search_regex` formatting if only label is visible.\n if label_len > max_width {\n return search_label[..max_width].to_owned();\n }\n\n // The search string consists of `search_label` + `search_regex` + `cursor`.\n let mut bar_text = String::from(search_label);\n bar_text.extend(StrShortener::new(\n search_regex,\n max_width.wrapping_sub(label_len + 1),\n ShortenDirection::Left,\n Some(SHORTENER),\n ));\n\n // Add place for cursor.\n bar_text.push(' ');\n\n bar_text\n }\n\n /// Draw preview for the currently highlighted `Hyperlink`.\n #[inline(never)]\n fn draw_hyperlink_preview(\n &mut self,\n config: &UiConfig,\n cursor_point: Option,\n display_offset: usize,\n ) {\n let num_cols = self.size_info.columns();\n let uris: Vec<_> = self\n .highlighted_hint\n .iter()\n .chain(&self.vi_highlighted_hint)\n .filter_map(|hint| hint.hyperlink().map(|hyperlink| hyperlink.uri()))\n .map(|uri| StrShortener::new(uri, num_cols, ShortenDirection::Right, Some(SHORTENER)))\n .collect();\n\n if uris.is_empty() {\n return;\n }\n\n // The maximum amount of protected lines including the ones we'll show preview on.\n let max_protected_lines = uris.len() * 2;\n\n // Lines we shouldn't show preview on, because it'll obscure the highlighted hint.\n let mut protected_lines = Vec::with_capacity(max_protected_lines);\n if 
self.size_info.screen_lines() > max_protected_lines {\n // Prefer to show preview even when it'll likely obscure the highlighted hint, when\n // there's no place left for it.\n protected_lines.push(self.hint_mouse_point.map(|point| point.line));\n protected_lines.push(cursor_point.map(|point| point.line));\n }\n\n // Find the line in viewport we can draw preview on without obscuring protected lines.\n let viewport_bottom = self.size_info.bottommost_line() - Line(display_offset as i32);\n let viewport_top = viewport_bottom - (self.size_info.screen_lines() - 1);\n let uri_lines = (viewport_top.0..=viewport_bottom.0)\n .rev()\n .map(|line| Some(Line(line)))\n .filter_map(|line| {\n if protected_lines.contains(&line) {\n None\n } else {\n protected_lines.push(line);\n line\n }\n })\n .take(uris.len())\n .flat_map(|line| term::point_to_viewport(display_offset, Point::new(line, Column(0))));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n for (uri, point) in uris.into_iter().zip(uri_lines) {\n // Damage the uri preview.\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n\n // Damage the uri preview for the next frame as well.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n self.renderer.draw_string(point, fg, bg, uri, &self.size_info, &mut self.glyph_cache);\n }\n }\n\n /// Draw current search regex.\n #[inline(never)]\n fn draw_search(&mut self, config: &UiConfig, text: &str) {\n // Assure text length is at least num_cols.\n let num_cols = self.size_info.columns();\n let text = format!(\"{:<1$}\", text, num_cols);\n\n let point = Point::new(self.size_info.screen_lines(), Column(0));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n\n self.renderer.draw_string(\n point,\n fg,\n bg,\n text.chars(),\n &self.size_info,\n &mut self.glyph_cache,\n );\n }\n\n /// Draw render timer.\n #[inline(never)]\n fn draw_render_timer(&mut self, config: &UiConfig) {\n if !config.debug.render_timer {\n return;\n }\n\n let timing = format!(\"{:.3} usec\", self.meter.average());\n let point = Point::new(self.size_info.screen_lines().saturating_sub(2), Column(0));\n let fg = config.colors.primary.background;\n let bg = config.colors.normal.red;\n\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, timing.len());\n self.damage_tracker.frame().damage_line(damage);\n // Damage the render timer for the next frame.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, timing.chars(), &self.size_info, glyph_cache);\n }\n\n /// Draw an indicator for the position of a line in history.\n #[inline(never)]\n fn draw_line_indicator(\n &mut self,\n config: &UiConfig,\n total_lines: usize,\n obstructed_column: Option,\n line: usize,\n ) {\n let columns = self.size_info.columns();\n let text = format!(\"[{}/{}]\", line, total_lines - 1);\n let column = Column(self.size_info.columns().saturating_sub(text.len()));\n let point = Point::new(0, column);\n\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, columns - 1);\n self.damage_tracker.frame().damage_line(damage);\n // Damage it on the next frame in case it goes away.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let colors = &config.colors;\n let fg = 
colors.line_indicator.foreground.unwrap_or(colors.primary.background);\n let bg = colors.line_indicator.background.unwrap_or(colors.primary.foreground);\n\n // Do not render anything if it would obscure the vi mode cursor.\n if obstructed_column.map_or(true, |obstructed_column| obstructed_column < column) {\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, text.chars(), &self.size_info, glyph_cache);\n }\n }\n\n /// Returns `true` if damage information should be collected, `false` otherwise.\n #[inline]\n fn collect_damage(&self) -> bool {\n matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) || self.damage_tracker.debug\n }\n\n /// Highlight damaged rects.\n ///\n /// This function is for debug purposes only.\n fn highlight_damage(&self, render_rects: &mut Vec) {\n for damage_rect in &self.damage_tracker.shape_frame_damage(self.size_info.into()) {\n let x = damage_rect.x as f32;\n let height = damage_rect.height as f32;\n let width = damage_rect.width as f32;\n let y = damage_y_to_viewport_y(&self.size_info, damage_rect) as f32;\n let render_rect = RenderRect::new(x, y, width, height, DAMAGE_RECT_COLOR, 0.5);\n\n render_rects.push(render_rect);\n }\n }\n\n /// Request a new frame for a window on Wayland.\n fn request_frame(&mut self, scheduler: &mut Scheduler) {\n // Mark that we've used a frame.\n self.window.has_frame = false;\n\n // Get the display vblank interval.\n let monitor_vblank_interval = 1_000_000.\n / self\n .window\n .current_monitor()\n .and_then(|monitor| monitor.refresh_rate_millihertz())\n .unwrap_or(60_000) as f64;\n\n // Now convert it to micro seconds.\n let monitor_vblank_interval =\n Duration::from_micros((1000. * monitor_vblank_interval) as u64);\n\n let swap_timeout = self.frame_timer.compute_timeout(monitor_vblank_interval);\n\n let window_id = self.window.id();\n let timer_id = TimerId::new(Topic::Frame, window_id);\n let event = Event::new(EventType::Frame, window_id);\n\n scheduler.schedule(event, swap_timeout, false, timer_id);\n }\n}\n\nimpl Drop for Display {\n fn drop(&mut self) {\n // Switch OpenGL context before dropping, otherwise objects (like programs) from other\n // contexts might be deleted when dropping renderer.\n self.make_current();\n unsafe {\n ManuallyDrop::drop(&mut self.renderer);\n ManuallyDrop::drop(&mut self.context);\n ManuallyDrop::drop(&mut self.surface);\n }\n }\n}\n\n/// Input method state.\n#[derive(Debug, Default)]\npub struct Ime {\n /// Whether the IME is enabled.\n enabled: bool,\n\n /// Current IME preedit.\n preedit: Option,\n}\n\nimpl Ime {\n #[inline]\n pub fn set_enabled(&mut self, is_enabled: bool) {\n if is_enabled {\n self.enabled = is_enabled\n } else {\n // Clear state when disabling IME.\n *self = Default::default();\n }\n }\n\n #[inline]\n pub fn is_enabled(&self) -> bool {\n self.enabled\n }\n\n #[inline]\n pub fn set_preedit(&mut self, preedit: Option) {\n self.preedit = preedit;\n }\n\n #[inline]\n pub fn preedit(&self) -> Option<&Preedit> {\n self.preedit.as_ref()\n }\n}\n\n#[derive(Debug, Default, PartialEq, Eq)]\npub struct Preedit {\n /// The preedit text.\n text: String,\n\n /// Byte offset for cursor start into the preedit text.\n ///\n /// `None` means that the cursor is invisible.\n cursor_byte_offset: Option,\n\n /// The cursor offset from the end of the preedit in char width.\n cursor_end_offset: Option,\n}\n\nimpl Preedit {\n pub fn new(text: String, cursor_byte_offset: Option) -> Self {\n let cursor_end_offset = if let Some(byte_offset) = 
cursor_byte_offset {\n // Convert byte offset into char offset.\n let cursor_end_offset =\n text[byte_offset..].chars().fold(0, |acc, ch| acc + ch.width().unwrap_or(1));\n\n Some(cursor_end_offset)\n } else {\n None\n };\n\n Self { text, cursor_byte_offset, cursor_end_offset }\n }\n}\n\n/// Pending renderer updates.\n///\n/// All renderer updates are cached to be applied just before rendering, to avoid platform-specific\n/// rendering issues.\n#[derive(Debug, Default, Copy, Clone)]\npub struct RendererUpdate {\n /// Should resize the window.\n resize: bool,\n\n /// Clear font caches.\n clear_font_cache: bool,\n}\n\n/// Struct for safe in-place replacement.\n///\n/// This struct allows easily replacing struct fields that provide `self -> Self` methods in-place,\n/// without having to deal with constantly unwrapping the underlying [`Option`].\nstruct Replaceable(Option);\n\nimpl Replaceable {\n pub fn new(inner: T) -> Self {\n Self(Some(inner))\n }\n\n /// Replace the contents of the container.\n pub fn replace_with T>(&mut self, f: F) {\n self.0 = self.0.take().map(f);\n }\n\n /// Get immutable access to the wrapped value.\n pub fn get(&self) -> &T {\n self.0.as_ref().unwrap()\n }\n\n /// Get mutable access to the wrapped value.\n pub fn get_mut(&mut self) -> &mut T {\n self.0.as_mut().unwrap()\n }\n}\n\nimpl Deref for Replaceable {\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n self.get()\n }\n}\n\nimpl DerefMut for Replaceable {\n fn deref_mut(&mut self) -> &mut Self::Target {\n self.get_mut()\n }\n}\n\n/// The frame timer state.\npub struct FrameTimer {\n /// Base timestamp used to compute sync points.\n base: Instant,\n\n /// The last timestamp we synced to.\n last_synced_timestamp: Instant,\n\n /// The refresh rate we've used to compute sync timestamps.\n refresh_interval: Duration,\n}\n\nimpl FrameTimer {\n pub fn new() -> Self {\n let now = Instant::now();\n Self { base: now, last_synced_timestamp: now, refresh_interval: Duration::ZERO }\n }\n\n /// Compute the delay that we should use to achieve the target frame\n /// rate.\n pub fn compute_timeout(&mut self, refresh_interval: Duration) -> Duration {\n let now = Instant::now();\n\n // Handle refresh rate change.\n if self.refresh_interval != refresh_interval {\n self.base = now;\n self.last_synced_timestamp = now;\n self.refresh_interval = refresh_interval;\n return refresh_interval;\n }\n\n let next_frame = self.last_synced_timestamp + self.refresh_interval;\n\n if next_frame < now {\n // Redraw immediately if we haven't drawn in over `refresh_interval` microseconds.\n let elapsed_micros = (now - self.base).as_micros() as u64;\n let refresh_micros = self.refresh_interval.as_micros() as u64;\n self.last_synced_timestamp =\n now - Duration::from_micros(elapsed_micros % refresh_micros);\n Duration::ZERO\n } else {\n // Redraw on the next `refresh_interval` clock tick.\n self.last_synced_timestamp = next_frame;\n next_frame - now\n }\n }\n}\n\n/// Calculate the cell dimensions based on font metrics.\n///\n/// This will return a tuple of the cell width and height.\n#[inline]\nfn compute_cell_size(config: &UiConfig, metrics: &crossfont::Metrics) -> (f32, f32) {\n let offset_x = f64::from(config.font.offset.x);\n let offset_y = f64::from(config.font.offset.y);\n (\n (metrics.average_advance + offset_x).floor().max(1.) as f32,\n (metrics.line_height + offset_y).floor().max(1.) 
as f32,\n )\n}\n\n/// Calculate the size of the window given padding, terminal dimensions and cell size.\nfn window_size(\n config: &UiConfig,\n dimensions: Dimensions,\n cell_width: f32,\n cell_height: f32,\n scale_factor: f32,\n) -> PhysicalSize {\n let padding = config.window.padding(scale_factor);\n\n let grid_width = cell_width * dimensions.columns.max(MIN_COLUMNS) as f32;\n let grid_height = cell_height * dimensions.lines.max(MIN_SCREEN_LINES) as f32;\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n PhysicalSize::new(width as u32, height as u32)\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/display/mod.rs", "repo_name": "alacritty/alacritty", "num_chunks": 83, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 0, "content": "//! The display subsystem including window management, font rasterization, and\n//! GPU drawing.\n\nuse std::cmp;\nuse std::fmt::{self, Formatter};\nuse std::mem::{self, ManuallyDrop};\nuse std::num::NonZeroU32;\nuse std::ops::{Deref, DerefMut};\nuse std::time::{Duration, Instant};\n\nuse glutin::context::{NotCurrentContext, PossiblyCurrentContext};\nuse glutin::prelude::*;\nuse glutin::surface::{Surface, SwapInterval, WindowSurface};\n\nuse log::{debug, info};\nuse parking_lot::MutexGuard;\nuse raw_window_handle::RawWindowHandle;\nuse serde::{Deserialize, Serialize};\nuse winit::dpi::PhysicalSize;\nuse winit::keyboard::ModifiersState;\nuse winit::window::CursorIcon;\n\nuse crossfont::{Rasterize, Rasterizer, Size as FontSize};\nuse unicode_width::UnicodeWidthChar;\n\n", "meta": {"hash_id": "484264198542f621aca7f7e0469f3f5fccb23b1db7ac149238eed2ec5f595fd6"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 1, "content": "use alacritty_terminal::event::{EventListener, OnResize, WindowSize};\nuse alacritty_terminal::grid::Dimensions as TermDimensions;\nuse alacritty_terminal::index::{Column, Direction, Line, Point};\nuse alacritty_terminal::selection::Selection;\nuse alacritty_terminal::term::cell::Flags;\nuse alacritty_terminal::term::{\n self, point_to_viewport, LineDamageBounds, Term, TermDamage, TermMode, MIN_COLUMNS,\n MIN_SCREEN_LINES,\n};\nuse alacritty_terminal::vte::ansi::{CursorShape, NamedColor};\n\n", "meta": {"hash_id": "76d9a8e79ad1411705e13167d40249eed4f8d6364b22acd0869bfe12e7b56338"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 2, "content": "use crate::config::font::Font;\nuse crate::config::window::Dimensions;\n#[cfg(not(windows))]\nuse crate::config::window::StartupMode;\nuse crate::config::UiConfig;\nuse crate::display::bell::VisualBell;\nuse crate::display::color::{List, Rgb};\nuse crate::display::content::{RenderableContent, RenderableCursor};\nuse crate::display::cursor::IntoRects;\nuse crate::display::damage::{damage_y_to_viewport_y, DamageTracker};\nuse crate::display::hint::{HintMatch, HintState};\nuse crate::display::meter::Meter;\nuse crate::display::window::Window;\nuse crate::event::{Event, EventType, Mouse, SearchState};\nuse crate::message_bar::{MessageBuffer, MessageType};\nuse crate::renderer::rects::{RenderLine, RenderLines, RenderRect};\nuse crate::renderer::{self, GlyphCache, Renderer};\nuse crate::scheduler::{Scheduler, TimerId, Topic};\nuse crate::string::{ShortenDirection, StrShortener};\n\n", "meta": {"hash_id": 
"5658811313267444de24e0bc4280ed6897a88442d59a6d9a9805b10fae348225"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 3, "content": "pub mod color;\npub mod content;\npub mod cursor;\npub mod hint;\npub mod window;\n\nmod bell;\nmod damage;\nmod meter;\n\n/// Label for the forward terminal search bar.\nconst FORWARD_SEARCH_LABEL: &str = \"Search: \";\n\n/// Label for the backward terminal search bar.\nconst BACKWARD_SEARCH_LABEL: &str = \"Backward Search: \";\n\n/// The character used to shorten the visible text like uri preview or search regex.\nconst SHORTENER: char = '…';\n\n/// Color which is used to highlight damaged rects when debugging.\nconst DAMAGE_RECT_COLOR: Rgb = Rgb::new(255, 0, 255);\n\n#[derive(Debug)]\npub enum Error {\n /// Error with window management.\n Window(window::Error),\n\n", "meta": {"hash_id": "e81ad765dd04aa2f33ced2d8e1c335d66fa46e352bbad1d604f622daf6a07275"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 4, "content": " /// Error dealing with fonts.\n Font(crossfont::Error),\n\n /// Error in renderer.\n Render(renderer::Error),\n\n /// Error during context operations.\n Context(glutin::error::Error),\n}\n\nimpl std::error::Error for Error {\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n match self {\n Error::Window(err) => err.source(),\n Error::Font(err) => err.source(),\n Error::Render(err) => err.source(),\n Error::Context(err) => err.source(),\n }\n }\n}\n\n", "meta": {"hash_id": "f9071da0991c1542dcd7921265508b2cbca7f6da227559a28ac1b5703c96a6e9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 5, "content": "impl fmt::Display for Error {\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n match self {\n Error::Window(err) => err.fmt(f),\n Error::Font(err) => err.fmt(f),\n Error::Render(err) => err.fmt(f),\n Error::Context(err) => err.fmt(f),\n }\n }\n}\n\nimpl From for Error {\n fn from(val: window::Error) -> Self {\n Error::Window(val)\n }\n}\n\nimpl From for Error {\n fn from(val: crossfont::Error) -> Self {\n Error::Font(val)\n }\n}\n\nimpl From for Error {\n fn from(val: renderer::Error) -> Self {\n Error::Render(val)\n }\n}\n\nimpl From for Error {\n fn from(val: glutin::error::Error) -> Self {\n Error::Context(val)\n }\n}\n\n", "meta": {"hash_id": "c7e78722d16533c3203bf8a963bf81552ccb100fb4e41c7d835e169d92c4286d"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 6, "content": "/// Terminal size info.\n#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]\npub struct SizeInfo {\n /// Terminal window width.\n width: T,\n\n /// Terminal window height.\n height: T,\n\n /// Width of individual cell.\n cell_width: T,\n\n /// Height of individual cell.\n cell_height: T,\n\n /// Horizontal window padding.\n padding_x: T,\n\n /// Vertical window padding.\n padding_y: T,\n\n /// Number of lines in the viewport.\n screen_lines: usize,\n\n /// Number of columns in the viewport.\n columns: usize,\n}\n\n", "meta": {"hash_id": "42a9f7ba3845fe29a133013618f15cf26f8cbe47a4a9220416f8edf6af47996c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 7, "content": "impl From> for SizeInfo {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n width: size_info.width as u32,\n height: size_info.height as u32,\n cell_width: size_info.cell_width as u32,\n cell_height: size_info.cell_height as u32,\n padding_x: 
size_info.padding_x as u32,\n padding_y: size_info.padding_y as u32,\n screen_lines: size_info.screen_lines,\n columns: size_info.screen_lines,\n }\n }\n}\n\n", "meta": {"hash_id": "322cbccb60ecbba5bfc46ccdc2a6ecb6e78a7002c0426a282f6371e3946e72bd"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 8, "content": "impl From> for WindowSize {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n num_cols: size_info.columns() as u16,\n num_lines: size_info.screen_lines() as u16,\n cell_width: size_info.cell_width() as u16,\n cell_height: size_info.cell_height() as u16,\n }\n }\n}\n\nimpl SizeInfo {\n #[inline]\n pub fn width(&self) -> T {\n self.width\n }\n\n #[inline]\n pub fn height(&self) -> T {\n self.height\n }\n\n #[inline]\n pub fn cell_width(&self) -> T {\n self.cell_width\n }\n\n #[inline]\n pub fn cell_height(&self) -> T {\n self.cell_height\n }\n\n #[inline]\n pub fn padding_x(&self) -> T {\n self.padding_x\n }\n\n #[inline]\n pub fn padding_y(&self) -> T {\n self.padding_y\n }\n}\n\n", "meta": {"hash_id": "d3da5d4d6a42319a17785d3cb52f22fb2532996de360fb0a70d1a10dc581ef57"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 9, "content": "impl SizeInfo {\n #[allow(clippy::too_many_arguments)]\n pub fn new(\n width: f32,\n height: f32,\n cell_width: f32,\n cell_height: f32,\n mut padding_x: f32,\n mut padding_y: f32,\n dynamic_padding: bool,\n ) -> SizeInfo {\n if dynamic_padding {\n padding_x = Self::dynamic_padding(padding_x.floor(), width, cell_width);\n padding_y = Self::dynamic_padding(padding_y.floor(), height, cell_height);\n }\n\n", "meta": {"hash_id": "fa70b304bccabbf388746322eebdfc35277b964c83b6baedb11d87538395744a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 10, "content": " let lines = (height - 2. * padding_y) / cell_height;\n let screen_lines = cmp::max(lines as usize, MIN_SCREEN_LINES);\n\n let columns = (width - 2. * padding_x) / cell_width;\n let columns = cmp::max(columns as usize, MIN_COLUMNS);\n\n SizeInfo {\n width,\n height,\n cell_width,\n cell_height,\n padding_x: padding_x.floor(),\n padding_y: padding_y.floor(),\n screen_lines,\n columns,\n }\n }\n\n #[inline]\n pub fn reserve_lines(&mut self, count: usize) {\n self.screen_lines = cmp::max(self.screen_lines.saturating_sub(count), MIN_SCREEN_LINES);\n }\n\n", "meta": {"hash_id": "332446484a520ca0002f602db88ae1228f7b7aa8adbf2e83ab0fd1df748d64a3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 11, "content": " /// Check if coordinates are inside the terminal grid.\n ///\n /// The padding, message bar or search are not counted as part of the grid.\n #[inline]\n pub fn contains_point(&self, x: usize, y: usize) -> bool {\n x <= (self.padding_x + self.columns as f32 * self.cell_width) as usize\n && x > self.padding_x as usize\n && y <= (self.padding_y + self.screen_lines as f32 * self.cell_height) as usize\n && y > self.padding_y as usize\n }\n\n", "meta": {"hash_id": "ef5f151414c4d3a796a840d12e29c2593453898ae0a1bf9a3c4402e62494d7b3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 12, "content": " /// Calculate padding to spread it evenly around the terminal content.\n #[inline]\n fn dynamic_padding(padding: f32, dimension: f32, cell_dimension: f32) -> f32 {\n padding + ((dimension - 2. 
* padding) % cell_dimension) / 2.\n }\n}\n\nimpl TermDimensions for SizeInfo {\n #[inline]\n fn columns(&self) -> usize {\n self.columns\n }\n\n #[inline]\n fn screen_lines(&self) -> usize {\n self.screen_lines\n }\n\n #[inline]\n fn total_lines(&self) -> usize {\n self.screen_lines()\n }\n}\n\n", "meta": {"hash_id": "73e5fe0b56a019ff26ad2381ff4b03e23d7765f4c505ed3d25a49d935c055637"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 13, "content": "#[derive(Default, Clone, Debug, PartialEq, Eq)]\npub struct DisplayUpdate {\n pub dirty: bool,\n\n dimensions: Option>,\n cursor_dirty: bool,\n font: Option,\n}\n\nimpl DisplayUpdate {\n pub fn dimensions(&self) -> Option> {\n self.dimensions\n }\n\n pub fn font(&self) -> Option<&Font> {\n self.font.as_ref()\n }\n\n pub fn cursor_dirty(&self) -> bool {\n self.cursor_dirty\n }\n\n pub fn set_dimensions(&mut self, dimensions: PhysicalSize) {\n self.dimensions = Some(dimensions);\n self.dirty = true;\n }\n\n pub fn set_font(&mut self, font: Font) {\n self.font = Some(font);\n self.dirty = true;\n }\n\n pub fn set_cursor_dirty(&mut self) {\n self.cursor_dirty = true;\n self.dirty = true;\n }\n}\n\n", "meta": {"hash_id": "e4f08dc76384cfa77f446aae70d820d82d9adea80d638f8db7324a8f69d73b5d"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 14, "content": "/// The display wraps a window, font rasterizer, and GPU renderer.\npub struct Display {\n pub window: Window,\n\n pub size_info: SizeInfo,\n\n /// Hint highlighted by the mouse.\n pub highlighted_hint: Option,\n\n /// Hint highlighted by the vi mode cursor.\n pub vi_highlighted_hint: Option,\n\n pub raw_window_handle: RawWindowHandle,\n\n /// UI cursor visibility for blinking.\n pub cursor_hidden: bool,\n\n pub visual_bell: VisualBell,\n\n /// Mapped RGB values for each terminal color.\n pub colors: List,\n\n /// State of the keyboard hints.\n pub hint_state: HintState,\n\n /// Unprocessed display updates.\n pub pending_update: DisplayUpdate,\n\n /// The renderer update that takes place only once before the actual rendering.\n pub pending_renderer_update: Option,\n\n", "meta": {"hash_id": "3811d9736c47f355a8ad4b929c285e77eb099350e68420142636952a39f2223e"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 15, "content": " /// The ime on the given display.\n pub ime: Ime,\n\n /// The state of the timer for frame scheduling.\n pub frame_timer: FrameTimer,\n\n /// Damage tracker for the given display.\n pub damage_tracker: DamageTracker,\n\n /// Font size used by the window.\n pub font_size: FontSize,\n\n // Mouse point position when highlighting hints.\n hint_mouse_point: Option,\n\n renderer: ManuallyDrop,\n\n surface: ManuallyDrop>,\n\n context: ManuallyDrop>,\n\n glyph_cache: GlyphCache,\n meter: Meter,\n}\n\nimpl Display {\n pub fn new(\n window: Window,\n gl_context: NotCurrentContext,\n config: &UiConfig,\n _tabbed: bool,\n ) -> Result {\n let raw_window_handle = window.raw_window_handle();\n\n let scale_factor = window.scale_factor as f32;\n let rasterizer = Rasterizer::new()?;\n\n", "meta": {"hash_id": "b15f77642e7ceded8462e22b6ac3f81c2205a35d59841fdc3fe6ad8c4ab9f3ae"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 16, "content": " let font_size = config.font.size().scale(scale_factor);\n debug!(\"Loading \\\"{}\\\" font\", &config.font.normal().family);\n let font = config.font.clone().with_size(font_size);\n let mut 
glyph_cache = GlyphCache::new(rasterizer, &font)?;\n\n let metrics = glyph_cache.font_metrics();\n let (cell_width, cell_height) = compute_cell_size(config, &metrics);\n\n // Resize the window to account for the user configured size.\n if let Some(dimensions) = config.window.dimensions() {\n let size = window_size(config, dimensions, cell_width, cell_height, scale_factor);\n window.request_inner_size(size);\n }\n\n", "meta": {"hash_id": "72b2e89ab1dc3d3689c6c98fec13bb24aadee6692a3837c6580dbc15874b986a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 17, "content": " // Create the GL surface to draw into.\n let surface = renderer::platform::create_gl_surface(\n &gl_context,\n window.inner_size(),\n window.raw_window_handle(),\n )?;\n\n // Make the context current.\n let context = gl_context.make_current(&surface)?;\n\n // Create renderer.\n let mut renderer = Renderer::new(&context, config.debug.renderer)?;\n\n", "meta": {"hash_id": "e835ec5d07329f713115a3c6273e5eb65798fd8f337f30af00c2bd2cca72fde9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 18, "content": " // Load font common glyphs to accelerate rendering.\n debug!(\"Filling glyph cache with common glyphs\");\n renderer.with_loader(|mut api| {\n glyph_cache.reset_glyph_cache(&mut api);\n });\n\n let padding = config.window.padding(window.scale_factor as f32);\n let viewport_size = window.inner_size();\n\n // Create new size with at least one column and row.\n let size_info = SizeInfo::new(\n viewport_size.width as f32,\n viewport_size.height as f32,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding && config.window.dimensions().is_none(),\n );\n\n", "meta": {"hash_id": "91bb3ae5c6b4871b1e845d262a3a43b180f51fb8d6f21ecce16cf8e1c92501f3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 19, "content": " info!(\"Cell size: {} x {}\", cell_width, cell_height);\n info!(\"Padding: {} x {}\", size_info.padding_x(), size_info.padding_y());\n info!(\"Width: {}, Height: {}\", size_info.width(), size_info.height());\n\n // Update OpenGL projection.\n renderer.resize(&size_info);\n\n // Clear screen.\n let background_color = config.colors.primary.background;\n renderer.clear(background_color, config.window_opacity());\n\n // Disable shadows for transparent windows on macOS.\n #[cfg(target_os = \"macos\")]\n window.set_has_shadow(config.window_opacity() >= 1.0);\n\n let is_wayland = matches!(raw_window_handle, RawWindowHandle::Wayland(_));\n\n", "meta": {"hash_id": "7faf9dbeb3a6455f09d242144a94e725f681ff8c08ea0280f3de2b22f4d55f6c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 20, "content": " // On Wayland we can safely ignore this call, since the window isn't visible until you\n // actually draw something into it and commit those changes.\n if !is_wayland {\n surface.swap_buffers(&context).expect(\"failed to swap buffers.\");\n renderer.finish();\n }\n\n // Set resize increments for the newly created window.\n if config.window.resize_increments {\n window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n window.set_visible(true);\n\n", "meta": {"hash_id": "c5c51f4f8a09dcfa7cd5c7d48a5506f82a3044f317efeafd10d19b70c5ac9b7b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 21, "content": " #[allow(clippy::single_match)]\n #[cfg(not(windows))]\n if !_tabbed {\n match 
config.window.startup_mode {\n #[cfg(target_os = \"macos\")]\n StartupMode::SimpleFullscreen => window.set_simple_fullscreen(true),\n StartupMode::Maximized if !is_wayland => window.set_maximized(true),\n _ => (),\n }\n }\n\n let hint_state = HintState::new(config.hints.alphabet());\n\n let mut damage_tracker = DamageTracker::new(size_info.screen_lines(), size_info.columns());\n damage_tracker.debug = config.debug.highlight_damage;\n\n // Disable vsync.\n if let Err(err) = surface.set_swap_interval(&context, SwapInterval::DontWait) {\n info!(\"Failed to disable vsync: {}\", err);\n }\n\n", "meta": {"hash_id": "35aa2a8614b0447dfb154a87dfb78adb124b3317914b16156fed58fd3f314225"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 22, "content": " Ok(Self {\n context: ManuallyDrop::new(Replaceable::new(context)),\n visual_bell: VisualBell::from(&config.bell),\n renderer: ManuallyDrop::new(renderer),\n surface: ManuallyDrop::new(surface),\n colors: List::from(&config.colors),\n frame_timer: FrameTimer::new(),\n raw_window_handle,\n damage_tracker,\n glyph_cache,\n hint_state,\n size_info,\n font_size,\n window,\n pending_renderer_update: Default::default(),\n vi_highlighted_hint: Default::default(),\n highlighted_hint: Default::default(),\n hint_mouse_point: Default::default(),\n pending_update: Default::default(),\n cursor_hidden: Default::default(),\n meter: Default::default(),\n ime: Default::default(),\n })\n }\n\n", "meta": {"hash_id": "e768c80e459a073329b21eb0d348a82d396b8fffaf27b945fdeba4d425f7b5ca"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 23, "content": " #[inline]\n pub fn gl_context(&self) -> &PossiblyCurrentContext {\n self.context.get()\n }\n\n pub fn make_not_current(&mut self) {\n if self.context.get().is_current() {\n self.context.replace_with(|context| {\n context\n .make_not_current()\n .expect(\"failed to disable context\")\n .treat_as_possibly_current()\n });\n }\n }\n\n pub fn make_current(&self) {\n if !self.context.get().is_current() {\n self.context.make_current(&self.surface).expect(\"failed to make context current\")\n }\n }\n\n", "meta": {"hash_id": "aa553a1499d68d6e0956646608fa67bb13518dc62eb65df6a3070b35c474038f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 24, "content": " fn swap_buffers(&self) {\n #[allow(clippy::single_match)]\n let res = match (self.surface.deref(), &self.context.get()) {\n #[cfg(not(any(target_os = \"macos\", windows)))]\n (Surface::Egl(surface), PossiblyCurrentContext::Egl(context))\n if matches!(self.raw_window_handle, RawWindowHandle::Wayland(_))\n && !self.damage_tracker.debug =>\n {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n surface.swap_buffers_with_damage(context, &damage)\n },\n (surface, context) => surface.swap_buffers(context),\n };\n if let Err(err) = res {\n debug!(\"error calling swap_buffers: {}\", err);\n }\n }\n\n", "meta": {"hash_id": "b668652e20a43061c67b22b03f56c114a7dc72bd75a21da776c9356ff2154af0"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 25, "content": " /// Update font size and cell dimensions.\n ///\n /// This will return a tuple of the cell width and height.\n fn update_font_size(\n glyph_cache: &mut GlyphCache,\n config: &UiConfig,\n font: &Font,\n ) -> (f32, f32) {\n let _ = glyph_cache.update_font_size(font);\n\n // Compute new cell sizes.\n compute_cell_size(config, 
&glyph_cache.font_metrics())\n }\n\n /// Reset glyph cache.\n fn reset_glyph_cache(&mut self) {\n let cache = &mut self.glyph_cache;\n self.renderer.with_loader(|mut api| {\n cache.reset_glyph_cache(&mut api);\n });\n }\n\n", "meta": {"hash_id": "fe9e2774ab5dfd564b6579cfb9b1ce85b2015177d221aef32e2d3ae14507cec4"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 26, "content": " // XXX: this function must not call to any `OpenGL` related tasks. Renderer updates are\n // performed in [`Self::process_renderer_update`] right before drawing.\n //\n /// Process update events.\n pub fn handle_update(\n &mut self,\n terminal: &mut Term,\n pty_resize_handle: &mut dyn OnResize,\n message_buffer: &MessageBuffer,\n search_state: &mut SearchState,\n config: &UiConfig,\n ) where\n T: EventListener,\n {\n let pending_update = mem::take(&mut self.pending_update);\n\n", "meta": {"hash_id": "ed1f56505a3374dd2edecc05ad0cee8c469187f9ecc26860492082baefccf9b5"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 27, "content": " let (mut cell_width, mut cell_height) =\n (self.size_info.cell_width(), self.size_info.cell_height());\n\n if pending_update.font().is_some() || pending_update.cursor_dirty() {\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.clear_font_cache = true\n }\n\n // Update font size and cell dimensions.\n if let Some(font) = pending_update.font() {\n let cell_dimensions = Self::update_font_size(&mut self.glyph_cache, config, font);\n cell_width = cell_dimensions.0;\n cell_height = cell_dimensions.1;\n\n", "meta": {"hash_id": "79e8568f712841aa209157ef132823f1306f62017a353e68b6113b714b554136"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 28, "content": " info!(\"Cell size: {} x {}\", cell_width, cell_height);\n\n // Mark entire terminal as damaged since glyph size could change without cell size\n // changes.\n self.damage_tracker.frame().mark_fully_damaged();\n }\n\n let (mut width, mut height) = (self.size_info.width(), self.size_info.height());\n if let Some(dimensions) = pending_update.dimensions() {\n width = dimensions.width as f32;\n height = dimensions.height as f32;\n }\n\n", "meta": {"hash_id": "20a281598c4b5f24da35d80abaf1a660d7a05a7d30f7566041448bf4c6a49ad7"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 29, "content": " let padding = config.window.padding(self.window.scale_factor as f32);\n\n let mut new_size = SizeInfo::new(\n width,\n height,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding,\n );\n\n // Update number of column/lines in the viewport.\n let search_active = search_state.history_index.is_some();\n let message_bar_lines = message_buffer.message().map_or(0, |m| m.text(&new_size).len());\n let search_lines = usize::from(search_active);\n new_size.reserve_lines(message_bar_lines + search_lines);\n\n", "meta": {"hash_id": "72069e14b9121905e307747adf2b90579c0aae90998058ad1320e1f17c892f17"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 30, "content": " // Update resize increments.\n if config.window.resize_increments {\n self.window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n // Resize when terminal when its dimensions have changed.\n if self.size_info.screen_lines() != new_size.screen_lines\n || self.size_info.columns() != 
new_size.columns()\n {\n // Resize PTY.\n pty_resize_handle.on_resize(new_size.into());\n\n", "meta": {"hash_id": "591f61d27ba4860ebe42874418318ca473425d226193d4c3803e71da91332e9a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 31, "content": " // Resize terminal.\n terminal.resize(new_size);\n\n // Resize damage tracking.\n self.damage_tracker.resize(new_size.screen_lines(), new_size.columns());\n }\n\n // Check if dimensions have changed.\n if new_size != self.size_info {\n // Queue renderer update.\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.resize = true;\n\n // Clear focused search match.\n search_state.clear_focused_match();\n }\n self.size_info = new_size;\n }\n\n", "meta": {"hash_id": "cbd894d544aaeadcbfea18922999949498c11df57b4722c93f1d487f08ede145"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 32, "content": " // NOTE: Renderer updates are split off, since platforms like Wayland require resize and other\n // OpenGL operations to be performed right before rendering. Otherwise they could lock the\n // back buffer and render with the previous state. This also solves flickering during resizes.\n //\n /// Update the state of the renderer.\n pub fn process_renderer_update(&mut self) {\n let renderer_update = match self.pending_renderer_update.take() {\n Some(renderer_update) => renderer_update,\n _ => return,\n };\n\n", "meta": {"hash_id": "bbf795ca1bbb43287648230c60608c2d4c4a558dfac639cf85cee2a03f842120"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 33, "content": " // Resize renderer.\n if renderer_update.resize {\n let width = NonZeroU32::new(self.size_info.width() as u32).unwrap();\n let height = NonZeroU32::new(self.size_info.height() as u32).unwrap();\n self.surface.resize(&self.context, width, height);\n }\n\n // Ensure we're modifying the correct OpenGL context.\n self.make_current();\n\n if renderer_update.clear_font_cache {\n self.reset_glyph_cache();\n }\n\n self.renderer.resize(&self.size_info);\n\n info!(\"Padding: {} x {}\", self.size_info.padding_x(), self.size_info.padding_y());\n info!(\"Width: {}, Height: {}\", self.size_info.width(), self.size_info.height());\n }\n\n", "meta": {"hash_id": "bae2fda06b24164a992b035f05346cec0db2aae06c91013b8901d87838639100"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 34, "content": " /// Draw the screen.\n ///\n /// A reference to Term whose state is being drawn must be provided.\n ///\n /// This call may block if vsync is enabled.\n pub fn draw(\n &mut self,\n mut terminal: MutexGuard<'_, Term>,\n scheduler: &mut Scheduler,\n message_buffer: &MessageBuffer,\n config: &UiConfig,\n search_state: &mut SearchState,\n ) {\n // Collect renderable content before the terminal is dropped.\n let mut content = RenderableContent::new(config, self, &terminal, search_state);\n", "meta": {"hash_id": "4ee907a039f825726c2412e854fe4bc9a179f4904d347aed1d037b9b9432af2f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 35, "content": " let mut grid_cells = Vec::new();\n for cell in &mut content {\n grid_cells.push(cell);\n }\n let selection_range = content.selection_range();\n let foreground_color = content.color(NamedColor::Foreground as usize);\n let background_color = content.color(NamedColor::Background as usize);\n let display_offset = 
content.display_offset();\n let cursor = content.cursor();\n\n let cursor_point = terminal.grid().cursor.point;\n let total_lines = terminal.grid().total_lines();\n let metrics = self.glyph_cache.font_metrics();\n let size_info = self.size_info;\n\n let vi_mode = terminal.mode().contains(TermMode::VI);\n let vi_cursor_point = if vi_mode { Some(terminal.vi_mode_cursor.point) } else { None };\n\n", "meta": {"hash_id": "fe491ce79ac648bfff38ee67779efea880d2cd4a4839d5ec21b41f0815d5442e"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 36, "content": " // Add damage from the terminal.\n if self.collect_damage() {\n match terminal.damage() {\n TermDamage::Full => self.damage_tracker.frame().mark_fully_damaged(),\n TermDamage::Partial(damaged_lines) => {\n for damage in damaged_lines {\n self.damage_tracker.frame().damage_line(damage);\n }\n },\n }\n terminal.reset_damage();\n }\n\n // Drop terminal as early as possible to free lock.\n drop(terminal);\n\n // Add damage from alacritty's UI elements overlapping terminal.\n if self.collect_damage() {\n let requires_full_damage = self.visual_bell.intensity() != 0.\n || self.hint_state.active()\n || search_state.regex().is_some();\n\n", "meta": {"hash_id": "78edad84eefb6200419f24f43c2eafca931dce7eec6ca54851fe18f91979f03b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 37, "content": " if requires_full_damage {\n self.damage_tracker.frame().mark_fully_damaged();\n self.damage_tracker.next_frame().mark_fully_damaged();\n }\n\n let vi_cursor_viewport_point =\n vi_cursor_point.and_then(|cursor| point_to_viewport(display_offset, cursor));\n\n self.damage_tracker.damage_vi_cursor(vi_cursor_viewport_point);\n self.damage_tracker.damage_selection(selection_range, display_offset);\n }\n\n // Make sure this window's OpenGL context is active.\n self.make_current();\n\n self.renderer.clear(background_color, config.window_opacity());\n let mut lines = RenderLines::new();\n\n // Optimize loop hint comparator.\n let has_highlighted_hint =\n self.highlighted_hint.is_some() || self.vi_highlighted_hint.is_some();\n\n", "meta": {"hash_id": "fbf5a992b119745ae32862906a730ddbedac02888a88bc5edd93d7cfaa3d4982"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 38, "content": " // Draw grid.\n {\n let _sampler = self.meter.sampler();\n\n // Ensure macOS hasn't reset our viewport.\n #[cfg(target_os = \"macos\")]\n self.renderer.set_viewport(&size_info);\n\n let glyph_cache = &mut self.glyph_cache;\n let highlighted_hint = &self.highlighted_hint;\n let vi_highlighted_hint = &self.vi_highlighted_hint;\n let damage_tracker = &mut self.damage_tracker;\n\n self.renderer.draw_cells(\n &size_info,\n glyph_cache,\n grid_cells.into_iter().map(|mut cell| {\n // Underline hints hovered by mouse or vi mode cursor.\n let point = term::viewport_to_point(display_offset, cell.point);\n\n", "meta": {"hash_id": "8c40ac29292bd9425a2fcef0f8c826637888ee41be53e36d9a257d3c3a7c15a3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 39, "content": " if has_highlighted_hint {\n let hyperlink =\n cell.extra.as_ref().and_then(|extra| extra.hyperlink.as_ref());\n if highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n || vi_highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n {\n cell.flags.insert(Flags::UNDERLINE);\n // Damage hints for the current 
and next frames.\n damage_tracker.frame().damage_point(cell.point);\n damage_tracker.next_frame().damage_point(cell.point);\n }\n }\n\n", "meta": {"hash_id": "3520acced525739e717b6e0e00879a0189cd42190601b2223baa6ad5555801e4"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 40, "content": " // Update underline/strikeout.\n lines.update(&cell);\n\n cell\n }),\n );\n }\n\n let mut rects = lines.rects(&metrics, &size_info);\n\n if let Some(vi_cursor_point) = vi_cursor_point {\n // Indicate vi mode by showing the cursor's position in the top right corner.\n let line = (-vi_cursor_point.line.0 + size_info.bottommost_line().0) as usize;\n let obstructed_column = Some(vi_cursor_point)\n .filter(|point| point.line == -(display_offset as i32))\n .map(|point| point.column);\n self.draw_line_indicator(config, total_lines, obstructed_column, line);\n } else if search_state.regex().is_some() {\n // Show current display offset in vi-less search to indicate match position.\n self.draw_line_indicator(config, total_lines, None, display_offset);\n };\n\n", "meta": {"hash_id": "298265debd2bb5d01f2becadcd2b913ad31b2ad76776a82a8cdc3f36307c54dd"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 41, "content": " // Draw cursor.\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n\n // Push visual bell after url/underline/strikeout rects.\n let visual_bell_intensity = self.visual_bell.intensity();\n if visual_bell_intensity != 0. {\n let visual_bell_rect = RenderRect::new(\n 0.,\n 0.,\n size_info.width(),\n size_info.height(),\n config.bell.color,\n visual_bell_intensity as f32,\n );\n rects.push(visual_bell_rect);\n }\n\n", "meta": {"hash_id": "18599d3454e142675535df415c5701663d07bffa7f8411f237c35a3d22c94e8f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 42, "content": " // Handle IME positioning and search bar rendering.\n let ime_position = match search_state.regex() {\n Some(regex) => {\n let search_label = match search_state.direction() {\n Direction::Right => FORWARD_SEARCH_LABEL,\n Direction::Left => BACKWARD_SEARCH_LABEL,\n };\n\n let search_text = Self::format_search(regex, search_label, size_info.columns());\n\n // Render the search bar.\n self.draw_search(config, &search_text);\n\n // Draw search bar cursor.\n let line = size_info.screen_lines();\n let column = Column(search_text.chars().count() - 1);\n\n", "meta": {"hash_id": "c20bfcb216ac981870022b5ffddbfa9b53bc1cf353566e9a690e17e3f4d6f544"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 43, "content": " // Add cursor to search bar if IME is not active.\n if self.ime.preedit().is_none() {\n let fg = config.colors.footer_bar_foreground();\n let shape = CursorShape::Underline;\n let cursor = RenderableCursor::new(Point::new(line, column), shape, fg, false);\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n }\n\n Some(Point::new(line, column))\n },\n None => {\n let num_lines = self.size_info.screen_lines();\n term::point_to_viewport(display_offset, cursor_point)\n .filter(|point| point.line < num_lines)\n },\n };\n\n", "meta": {"hash_id": "350ea9d2cd9bdced937484a43d5eac3fbedf6ed7063a0928056225ec2ca7058e"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 44, "content": " // Handle IME.\n if self.ime.is_enabled() {\n if let Some(point) = ime_position {\n let (fg, bg) = if 
search_state.regex().is_some() {\n (config.colors.footer_bar_foreground(), config.colors.footer_bar_background())\n } else {\n (foreground_color, background_color)\n };\n\n self.draw_ime_preview(point, fg, bg, &mut rects, config);\n }\n }\n\n if let Some(message) = message_buffer.message() {\n let search_offset = usize::from(search_state.regex().is_some());\n let text = message.text(&size_info);\n\n", "meta": {"hash_id": "5b742665b72ab3ff0a08013a146530fe6e6830cef61bce1a7f79aebf1eb67555"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 45, "content": " // Create a new rectangle for the background.\n let start_line = size_info.screen_lines() + search_offset;\n let y = size_info.cell_height().mul_add(start_line as f32, size_info.padding_y());\n\n let bg = match message.ty() {\n MessageType::Error => config.colors.normal.red,\n MessageType::Warning => config.colors.normal.yellow,\n };\n\n", "meta": {"hash_id": "a0a75ee6d8209be0e7e86de929d23cb6b231aa453c4bceb95b062a5bdbd0d51b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 46, "content": " let x = 0;\n let width = size_info.width() as i32;\n let height = (size_info.height() - y) as i32;\n let message_bar_rect =\n RenderRect::new(x as f32, y, width as f32, height as f32, bg, 1.);\n\n // Push message_bar in the end, so it'll be above all other content.\n rects.push(message_bar_rect);\n\n // Always damage message bar, since it could have messages of the same size in it.\n self.damage_tracker.frame().add_viewport_rect(&size_info, x, y as i32, width, height);\n\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n\n", "meta": {"hash_id": "7ff46d8ef5953994171ae2588af562115c117c5a10cdaccffbff5610b61c30d9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 47, "content": " // Relay messages to the user.\n let glyph_cache = &mut self.glyph_cache;\n let fg = config.colors.primary.background;\n for (i, message_text) in text.iter().enumerate() {\n let point = Point::new(start_line + i, Column(0));\n self.renderer.draw_string(\n point,\n fg,\n bg,\n message_text.chars(),\n &size_info,\n glyph_cache,\n );\n }\n } else {\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n }\n\n", "meta": {"hash_id": "4301e36e0494c3864c04e5e18d5714c4a38bacf6c448ef0f4b761c5a76781744"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 48, "content": " self.draw_render_timer(config);\n\n // Draw hyperlink uri preview.\n if has_highlighted_hint {\n let cursor_point = vi_cursor_point.or(Some(cursor_point));\n self.draw_hyperlink_preview(config, cursor_point, display_offset);\n }\n\n // Notify winit that we're about to present.\n self.window.pre_present_notify();\n\n // Highlight damage for debugging.\n if self.damage_tracker.debug {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n let mut rects = Vec::with_capacity(damage.len());\n self.highlight_damage(&mut rects);\n self.renderer.draw_rects(&self.size_info, &metrics, rects);\n }\n\n // Clearing debug highlights from the previous frame requires full redraw.\n self.swap_buffers();\n\n", "meta": {"hash_id": "634ea3f07419e628e451619fac284b5833898edc6e6206e6477f6b9397a2e163"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 49, "content": " if matches!(self.raw_window_handle, RawWindowHandle::Xcb(_) | RawWindowHandle::Xlib(_)) {\n // On 
X11 `swap_buffers` does not block for vsync. However the next OpenGl command\n // will block to synchronize (this is `glClear` in Alacritty), which causes a\n // permanent one frame delay.\n self.renderer.finish();\n }\n\n // XXX: Request the new frame after swapping buffers, so the\n // time to finish OpenGL operations is accounted for in the timeout.\n if !matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) {\n self.request_frame(scheduler);\n }\n\n self.damage_tracker.swap_damage();\n }\n\n /// Update to a new configuration.\n pub fn update_config(&mut self, config: &UiConfig) {\n self.damage_tracker.debug = config.debug.highlight_damage;\n self.visual_bell.update_config(&config.bell);\n self.colors = List::from(&config.colors);\n }\n\n", "meta": {"hash_id": "8193ff2c96e4891d02efb66bc8a53c84ac112a628fd77195c003536960b3267c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 50, "content": " /// Update the mouse/vi mode cursor hint highlighting.\n ///\n /// This will return whether the highlighted hints changed.\n pub fn update_highlighted_hints(\n &mut self,\n term: &Term,\n config: &UiConfig,\n mouse: &Mouse,\n modifiers: ModifiersState,\n ) -> bool {\n // Update vi mode cursor hint.\n let vi_highlighted_hint = if term.mode().contains(TermMode::VI) {\n let mods = ModifiersState::all();\n let point = term.vi_mode_cursor.point;\n hint::highlighted_at(term, config, point, mods)\n } else {\n None\n };\n let mut dirty = vi_highlighted_hint != self.vi_highlighted_hint;\n self.vi_highlighted_hint = vi_highlighted_hint;\n\n", "meta": {"hash_id": "ab85133046538b326b5da625910dbea49fdcfb5196b1302be9d7b38edfd2ecb1"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 51, "content": " // Abort if mouse highlighting conditions are not met.\n if !mouse.inside_text_area || !term.selection.as_ref().map_or(true, Selection::is_empty) {\n dirty |= self.highlighted_hint.is_some();\n self.highlighted_hint = None;\n return dirty;\n }\n\n // Find highlighted hint at mouse position.\n let point = mouse.point(&self.size_info, term.grid().display_offset());\n let highlighted_hint = hint::highlighted_at(term, config, point, modifiers);\n\n", "meta": {"hash_id": "c4e406929070cc79996d7fb380bd3196d1e17ec94dabeb5bf0058a1b83c8c4ca"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 52, "content": " // Update cursor shape.\n if highlighted_hint.is_some() {\n // If mouse changed the line, we should update the hyperlink preview, since the\n // highlighted hint could be disrupted by the old preview.\n dirty = self.hint_mouse_point.map_or(false, |p| p.line != point.line);\n self.hint_mouse_point = Some(point);\n self.window.set_mouse_cursor(CursorIcon::Pointer);\n } else if self.highlighted_hint.is_some() {\n self.hint_mouse_point = None;\n if term.mode().intersects(TermMode::MOUSE_MODE) && !term.mode().contains(TermMode::VI) {\n self.window.set_mouse_cursor(CursorIcon::Default);\n } else {\n self.window.set_mouse_cursor(CursorIcon::Text);\n }\n }\n\n", "meta": {"hash_id": "7a604714c947c7e581d69f25990ca030860f8076664f95c15614176b02f14f31"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 53, "content": " dirty |= self.highlighted_hint != highlighted_hint;\n self.highlighted_hint = highlighted_hint;\n\n dirty\n }\n\n #[inline(never)]\n fn draw_ime_preview(\n &mut self,\n point: Point,\n fg: Rgb,\n bg: Rgb,\n rects: &mut Vec,\n config: &UiConfig,\n ) 
{\n let preedit = match self.ime.preedit() {\n Some(preedit) => preedit,\n None => {\n // In case we don't have preedit, just set the popup point.\n self.window.update_ime_position(point, &self.size_info);\n return;\n },\n };\n\n let num_cols = self.size_info.columns();\n\n", "meta": {"hash_id": "36438e7aceac12b36ca1853743dbd414cc5a4eb8e6aa275d9cff045b7e2d4c88"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 54, "content": " // Get the visible preedit.\n let visible_text: String = match (preedit.cursor_byte_offset, preedit.cursor_end_offset) {\n (Some(byte_offset), Some(end_offset)) if end_offset > num_cols => StrShortener::new(\n &preedit.text[byte_offset..],\n num_cols,\n ShortenDirection::Right,\n Some(SHORTENER),\n ),\n _ => {\n StrShortener::new(&preedit.text, num_cols, ShortenDirection::Left, Some(SHORTENER))\n },\n }\n .collect();\n\n", "meta": {"hash_id": "02e3f7e39d36be809a0819dc9c541961971649fb2aa331c7c50fa4ec5e119978"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 55, "content": " let visible_len = visible_text.chars().count();\n\n let end = cmp::min(point.column.0 + visible_len, num_cols);\n let start = end.saturating_sub(visible_len);\n\n let start = Point::new(point.line, Column(start));\n let end = Point::new(point.line, Column(end - 1));\n\n let glyph_cache = &mut self.glyph_cache;\n let metrics = glyph_cache.font_metrics();\n\n self.renderer.draw_string(\n start,\n fg,\n bg,\n visible_text.chars(),\n &self.size_info,\n glyph_cache,\n );\n\n // Damage preedit inside the terminal viewport.\n if self.collect_damage() && point.line < self.size_info.screen_lines() {\n let damage = LineDamageBounds::new(start.line, 0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n", "meta": {"hash_id": "f4465b191a88d6c08068b0e33f39d884f63a67cc6829e896f24ecc1b96158940"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 56, "content": " // Add underline for preedit text.\n let underline = RenderLine { start, end, color: fg };\n rects.extend(underline.rects(Flags::UNDERLINE, &metrics, &self.size_info));\n\n let ime_popup_point = match preedit.cursor_end_offset {\n Some(cursor_end_offset) if cursor_end_offset != 0 => {\n let is_wide = preedit.text[preedit.cursor_byte_offset.unwrap_or_default()..]\n .chars()\n .next()\n .map(|ch| ch.width() == Some(2))\n .unwrap_or_default();\n\n", "meta": {"hash_id": "6a6a63878025944ce98f665e2365a98949fe34df15776aa07ce28cc0056d96e3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 57, "content": " let cursor_column = Column(\n (end.column.0 as isize - cursor_end_offset as isize + 1).max(0) as usize,\n );\n let cursor_point = Point::new(point.line, cursor_column);\n let cursor =\n RenderableCursor::new(cursor_point, CursorShape::HollowBlock, fg, is_wide);\n rects.extend(cursor.rects(&self.size_info, config.cursor.thickness()));\n cursor_point\n },\n _ => end,\n };\n\n", "meta": {"hash_id": "0999c2483d874c5f24ec5bbd6d71bd76a775140cbd0ab32eebb7e16a14576bbb"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 58, "content": " self.window.update_ime_position(ime_popup_point, &self.size_info);\n }\n\n /// Format search regex to account for the cursor and fullwidth characters.\n fn format_search(search_regex: &str, search_label: &str, max_width: usize) -> String {\n let label_len = 
search_label.len();\n\n // Skip `search_regex` formatting if only label is visible.\n if label_len > max_width {\n return search_label[..max_width].to_owned();\n }\n\n", "meta": {"hash_id": "3b5e31b08e83473461ab3d08a6586fcc45fe7f018892e0be697974a32cf99556"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 59, "content": " // The search string consists of `search_label` + `search_regex` + `cursor`.\n let mut bar_text = String::from(search_label);\n bar_text.extend(StrShortener::new(\n search_regex,\n max_width.wrapping_sub(label_len + 1),\n ShortenDirection::Left,\n Some(SHORTENER),\n ));\n\n // Add place for cursor.\n bar_text.push(' ');\n\n bar_text\n }\n\n", "meta": {"hash_id": "778d02069dba2c206695f9644f6da84d4059373e565c3a0a3582cb79e27fff53"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 60, "content": " /// Draw preview for the currently highlighted `Hyperlink`.\n #[inline(never)]\n fn draw_hyperlink_preview(\n &mut self,\n config: &UiConfig,\n cursor_point: Option,\n display_offset: usize,\n ) {\n let num_cols = self.size_info.columns();\n let uris: Vec<_> = self\n .highlighted_hint\n .iter()\n .chain(&self.vi_highlighted_hint)\n .filter_map(|hint| hint.hyperlink().map(|hyperlink| hyperlink.uri()))\n .map(|uri| StrShortener::new(uri, num_cols, ShortenDirection::Right, Some(SHORTENER)))\n .collect();\n\n", "meta": {"hash_id": "0b58c59db2b1bd7885858bc980e47aae6f8e8bb2087ed85e4fde922bfca11125"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 61, "content": " if uris.is_empty() {\n return;\n }\n\n // The maximum amount of protected lines including the ones we'll show preview on.\n let max_protected_lines = uris.len() * 2;\n\n // Lines we shouldn't show preview on, because it'll obscure the highlighted hint.\n let mut protected_lines = Vec::with_capacity(max_protected_lines);\n if self.size_info.screen_lines() > max_protected_lines {\n // Prefer to show preview even when it'll likely obscure the highlighted hint, when\n // there's no place left for it.\n protected_lines.push(self.hint_mouse_point.map(|point| point.line));\n protected_lines.push(cursor_point.map(|point| point.line));\n }\n\n", "meta": {"hash_id": "783ec3774b49fcca910661e627316fe5e9efbeffe175a624958653998950f878"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 62, "content": " // Find the line in viewport we can draw preview on without obscuring protected lines.\n let viewport_bottom = self.size_info.bottommost_line() - Line(display_offset as i32);\n let viewport_top = viewport_bottom - (self.size_info.screen_lines() - 1);\n let uri_lines = (viewport_top.0..=viewport_bottom.0)\n .rev()\n .map(|line| Some(Line(line)))\n .filter_map(|line| {\n if protected_lines.contains(&line) {\n None\n } else {\n protected_lines.push(line);\n line\n }\n })\n .take(uris.len())\n .flat_map(|line| term::point_to_viewport(display_offset, Point::new(line, Column(0))));\n\n", "meta": {"hash_id": "f16942c0759faa75a9115d4645762a3ad51c204eae0f9765e0f0c1e2d6b66385"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 63, "content": " let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n for (uri, point) in uris.into_iter().zip(uri_lines) {\n // Damage the uri preview.\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, num_cols);\n 
self.damage_tracker.frame().damage_line(damage);\n\n // Damage the uri preview for the next frame as well.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n self.renderer.draw_string(point, fg, bg, uri, &self.size_info, &mut self.glyph_cache);\n }\n }\n\n", "meta": {"hash_id": "e310b116e108b9ec16c1b9f09d5e70f193b7b368a4b94c97682448e4fbb308c6"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 64, "content": " /// Draw current search regex.\n #[inline(never)]\n fn draw_search(&mut self, config: &UiConfig, text: &str) {\n // Assure text length is at least num_cols.\n let num_cols = self.size_info.columns();\n let text = format!(\"{:<1$}\", text, num_cols);\n\n let point = Point::new(self.size_info.screen_lines(), Column(0));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n\n", "meta": {"hash_id": "94c9b5e79fb5bd80da00b4ecbe94fd9020f9e4110ea6656f13003da0f41cda9a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 65, "content": " self.renderer.draw_string(\n point,\n fg,\n bg,\n text.chars(),\n &self.size_info,\n &mut self.glyph_cache,\n );\n }\n\n /// Draw render timer.\n #[inline(never)]\n fn draw_render_timer(&mut self, config: &UiConfig) {\n if !config.debug.render_timer {\n return;\n }\n\n let timing = format!(\"{:.3} usec\", self.meter.average());\n let point = Point::new(self.size_info.screen_lines().saturating_sub(2), Column(0));\n let fg = config.colors.primary.background;\n let bg = config.colors.normal.red;\n\n", "meta": {"hash_id": "704ee1cc058f89cf5781311565df241f3affc1a6a6b3665420dbf77bb5cad1af"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 66, "content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, timing.len());\n self.damage_tracker.frame().damage_line(damage);\n // Damage the render timer for the next frame.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, timing.chars(), &self.size_info, glyph_cache);\n }\n\n", "meta": {"hash_id": "490dbac953cf0c174cdebed878c4b2813294da5220a60dbca8b8f17160fdb1ad"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 67, "content": " /// Draw an indicator for the position of a line in history.\n #[inline(never)]\n fn draw_line_indicator(\n &mut self,\n config: &UiConfig,\n total_lines: usize,\n obstructed_column: Option,\n line: usize,\n ) {\n let columns = self.size_info.columns();\n let text = format!(\"[{}/{}]\", line, total_lines - 1);\n let column = Column(self.size_info.columns().saturating_sub(text.len()));\n let point = Point::new(0, column);\n\n", "meta": {"hash_id": "3808ecaf15da81a70e597a07f5c5da5ba4c17b1cf6f0dcfa2adf12f3bdbb475d"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 68, "content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, columns - 1);\n self.damage_tracker.frame().damage_line(damage);\n // Damage it on the next frame in case it goes away.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let colors = &config.colors;\n let fg = colors.line_indicator.foreground.unwrap_or(colors.primary.background);\n let bg = colors.line_indicator.background.unwrap_or(colors.primary.foreground);\n\n", "meta": {"hash_id": 
"5c1d0985bc1d7908fb5c317620576ac17fb3493eabd4d7d8fa3fefd35ab7ce9b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 69, "content": " // Do not render anything if it would obscure the vi mode cursor.\n if obstructed_column.map_or(true, |obstructed_column| obstructed_column < column) {\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, text.chars(), &self.size_info, glyph_cache);\n }\n }\n\n /// Returns `true` if damage information should be collected, `false` otherwise.\n #[inline]\n fn collect_damage(&self) -> bool {\n matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) || self.damage_tracker.debug\n }\n\n", "meta": {"hash_id": "2962a6868ce13297567593afea28482302fc693fe7e9f861f1a396aac68e724b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 70, "content": " /// Highlight damaged rects.\n ///\n /// This function is for debug purposes only.\n fn highlight_damage(&self, render_rects: &mut Vec) {\n for damage_rect in &self.damage_tracker.shape_frame_damage(self.size_info.into()) {\n let x = damage_rect.x as f32;\n let height = damage_rect.height as f32;\n let width = damage_rect.width as f32;\n let y = damage_y_to_viewport_y(&self.size_info, damage_rect) as f32;\n let render_rect = RenderRect::new(x, y, width, height, DAMAGE_RECT_COLOR, 0.5);\n\n", "meta": {"hash_id": "0f29cbbaec96d2dd0f1633b9ce7032e7f484e9c69d2639e8edd402f087889cbf"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 71, "content": " render_rects.push(render_rect);\n }\n }\n\n /// Request a new frame for a window on Wayland.\n fn request_frame(&mut self, scheduler: &mut Scheduler) {\n // Mark that we've used a frame.\n self.window.has_frame = false;\n\n // Get the display vblank interval.\n let monitor_vblank_interval = 1_000_000.\n / self\n .window\n .current_monitor()\n .and_then(|monitor| monitor.refresh_rate_millihertz())\n .unwrap_or(60_000) as f64;\n\n", "meta": {"hash_id": "cb8e18791a8d3f8818d32f820bb15fb163aa24833619cb3ad94e4d6e93bedbed"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 72, "content": " // Now convert it to micro seconds.\n let monitor_vblank_interval =\n Duration::from_micros((1000. 
* monitor_vblank_interval) as u64);\n\n let swap_timeout = self.frame_timer.compute_timeout(monitor_vblank_interval);\n\n let window_id = self.window.id();\n let timer_id = TimerId::new(Topic::Frame, window_id);\n let event = Event::new(EventType::Frame, window_id);\n\n scheduler.schedule(event, swap_timeout, false, timer_id);\n }\n}\n\n", "meta": {"hash_id": "30e322a3414ba5e54e2599830958a3907fe0b065035a8c9cb418bf4c1d72efcd"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 73, "content": "impl Drop for Display {\n fn drop(&mut self) {\n // Switch OpenGL context before dropping, otherwise objects (like programs) from other\n // contexts might be deleted when dropping renderer.\n self.make_current();\n unsafe {\n ManuallyDrop::drop(&mut self.renderer);\n ManuallyDrop::drop(&mut self.context);\n ManuallyDrop::drop(&mut self.surface);\n }\n }\n}\n\n", "meta": {"hash_id": "bddb13186a6094969454c24a3f01f270d1b6b0e11ab07064f85194819b7209c9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 74, "content": "/// Input method state.\n#[derive(Debug, Default)]\npub struct Ime {\n /// Whether the IME is enabled.\n enabled: bool,\n\n /// Current IME preedit.\n preedit: Option,\n}\n\nimpl Ime {\n #[inline]\n pub fn set_enabled(&mut self, is_enabled: bool) {\n if is_enabled {\n self.enabled = is_enabled\n } else {\n // Clear state when disabling IME.\n *self = Default::default();\n }\n }\n\n #[inline]\n pub fn is_enabled(&self) -> bool {\n self.enabled\n }\n\n #[inline]\n pub fn set_preedit(&mut self, preedit: Option) {\n self.preedit = preedit;\n }\n\n #[inline]\n pub fn preedit(&self) -> Option<&Preedit> {\n self.preedit.as_ref()\n }\n}\n\n#[derive(Debug, Default, PartialEq, Eq)]\npub struct Preedit {\n /// The preedit text.\n text: String,\n\n", "meta": {"hash_id": "3f9b2205ea82cbcbaa666a59e8888ccf1ec69f058ede785adacc2f44e5761c1a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 75, "content": " /// Byte offset for cursor start into the preedit text.\n ///\n /// `None` means that the cursor is invisible.\n cursor_byte_offset: Option,\n\n /// The cursor offset from the end of the preedit in char width.\n cursor_end_offset: Option,\n}\n\nimpl Preedit {\n pub fn new(text: String, cursor_byte_offset: Option) -> Self {\n let cursor_end_offset = if let Some(byte_offset) = cursor_byte_offset {\n // Convert byte offset into char offset.\n let cursor_end_offset =\n text[byte_offset..].chars().fold(0, |acc, ch| acc + ch.width().unwrap_or(1));\n\n", "meta": {"hash_id": "c1bdc732d49c892d75f5fd3a6d0ab64d0f23a87b1bf485ad1c252bd881d09ba2"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 76, "content": " Some(cursor_end_offset)\n } else {\n None\n };\n\n Self { text, cursor_byte_offset, cursor_end_offset }\n }\n}\n\n/// Pending renderer updates.\n///\n/// All renderer updates are cached to be applied just before rendering, to avoid platform-specific\n/// rendering issues.\n#[derive(Debug, Default, Copy, Clone)]\npub struct RendererUpdate {\n /// Should resize the window.\n resize: bool,\n\n /// Clear font caches.\n clear_font_cache: bool,\n}\n\n/// Struct for safe in-place replacement.\n///\n/// This struct allows easily replacing struct fields that provide `self -> Self` methods in-place,\n/// without having to deal with constantly unwrapping the underlying [`Option`].\nstruct Replaceable(Option);\n\n", "meta": {"hash_id": 
"91b7c3746a5eb87290de64f6be7afc6266d4e43ccc5cbec4eb0b4dfddbb6132f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 77, "content": "impl Replaceable {\n pub fn new(inner: T) -> Self {\n Self(Some(inner))\n }\n\n /// Replace the contents of the container.\n pub fn replace_with T>(&mut self, f: F) {\n self.0 = self.0.take().map(f);\n }\n\n /// Get immutable access to the wrapped value.\n pub fn get(&self) -> &T {\n self.0.as_ref().unwrap()\n }\n\n /// Get mutable access to the wrapped value.\n pub fn get_mut(&mut self) -> &mut T {\n self.0.as_mut().unwrap()\n }\n}\n\n", "meta": {"hash_id": "d3d4d8f8720963a367d8f5bf74244739d252412ce356f463bbe3ed09eb00f2de"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 78, "content": "impl Deref for Replaceable {\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n self.get()\n }\n}\n\nimpl DerefMut for Replaceable {\n fn deref_mut(&mut self) -> &mut Self::Target {\n self.get_mut()\n }\n}\n\n/// The frame timer state.\npub struct FrameTimer {\n /// Base timestamp used to compute sync points.\n base: Instant,\n\n /// The last timestamp we synced to.\n last_synced_timestamp: Instant,\n\n /// The refresh rate we've used to compute sync timestamps.\n refresh_interval: Duration,\n}\n\nimpl FrameTimer {\n pub fn new() -> Self {\n let now = Instant::now();\n Self { base: now, last_synced_timestamp: now, refresh_interval: Duration::ZERO }\n }\n\n", "meta": {"hash_id": "26064c14ae007ff16e77137660718d45e30767b2eaea74e0ab974f76a635a2b0"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 79, "content": " /// Compute the delay that we should use to achieve the target frame\n /// rate.\n pub fn compute_timeout(&mut self, refresh_interval: Duration) -> Duration {\n let now = Instant::now();\n\n // Handle refresh rate change.\n if self.refresh_interval != refresh_interval {\n self.base = now;\n self.last_synced_timestamp = now;\n self.refresh_interval = refresh_interval;\n return refresh_interval;\n }\n\n let next_frame = self.last_synced_timestamp + self.refresh_interval;\n\n", "meta": {"hash_id": "51cde94b51707ea3131697086f758f952d2245228eb81581b37f916f06f5824c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 80, "content": " if next_frame < now {\n // Redraw immediately if we haven't drawn in over `refresh_interval` microseconds.\n let elapsed_micros = (now - self.base).as_micros() as u64;\n let refresh_micros = self.refresh_interval.as_micros() as u64;\n self.last_synced_timestamp =\n now - Duration::from_micros(elapsed_micros % refresh_micros);\n Duration::ZERO\n } else {\n // Redraw on the next `refresh_interval` clock tick.\n self.last_synced_timestamp = next_frame;\n next_frame - now\n }\n }\n}\n\n", "meta": {"hash_id": "3b7b39e20e3e2536fde3cc1b209c02f9909c9eeedf7fde9dfdad330f1345359b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 81, "content": "/// Calculate the cell dimensions based on font metrics.\n///\n/// This will return a tuple of the cell width and height.\n#[inline]\nfn compute_cell_size(config: &UiConfig, metrics: &crossfont::Metrics) -> (f32, f32) {\n let offset_x = f64::from(config.font.offset.x);\n let offset_y = f64::from(config.font.offset.y);\n (\n (metrics.average_advance + offset_x).floor().max(1.) as f32,\n (metrics.line_height + offset_y).floor().max(1.) 
as f32,\n )\n}\n\n", "meta": {"hash_id": "e60e12c31ebbe6a66ef26ed43c604bb403db5158fc9be3f62f599f20819fe982"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 82, "content": "/// Calculate the size of the window given padding, terminal dimensions and cell size.\nfn window_size(\n config: &UiConfig,\n dimensions: Dimensions,\n cell_width: f32,\n cell_height: f32,\n scale_factor: f32,\n) -> PhysicalSize {\n let padding = config.window.padding(scale_factor);\n\n let grid_width = cell_width * dimensions.columns.max(MIN_COLUMNS) as f32;\n let grid_height = cell_height * dimensions.lines.max(MIN_SCREEN_LINES) as f32;\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n PhysicalSize::new(width as u32, height as u32)\n}\n", "meta": {"hash_id": "e522ae0b3b63eb29c51c18a5828f1c0bd1bc3facf9bd57c05e8128ab5abd7a1c"}}]}], "golden_chunks": [{"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 49, "content": " if matches!(self.raw_window_handle, RawWindowHandle::Xcb(_) | RawWindowHandle::Xlib(_)) {\n // On X11 `swap_buffers` does not block for vsync. However the next OpenGl command\n // will block to synchronize (this is `glClear` in Alacritty), which causes a\n // permanent one frame delay.\n self.renderer.finish();\n }\n\n // XXX: Request the new frame after swapping buffers, so the\n // time to finish OpenGL operations is accounted for in the timeout.\n if !matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) {\n self.request_frame(scheduler);\n }\n\n self.damage_tracker.swap_damage();\n }\n\n /// Update to a new configuration.\n pub fn update_config(&mut self, config: &UiConfig) {\n self.damage_tracker.debug = config.debug.highlight_damage;\n self.visual_bell.update_config(&config.bell);\n self.colors = List::from(&config.colors);\n }\n\n", "meta": {"hash_id": "8193ff2c96e4891d02efb66bc8a53c84ac112a628fd77195c003536960b3267c"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/display/mod.rs", "repo_name": "alacritty/alacritty", "num_chunks": 83, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the FrameTimer struct?", "answer": "The FrameTimer struct is used to regulate the render frame rate and compute timeouts for frame drawing. It has a `refresh_interval` field to store the target duration between frames, and `base` and `last_synced_timestamp` fields to help compute the next frame timestamp. The `compute_timeout` method calculates the delay needed to reach the next frame timestamp based on the elapsed time since the last frame.", "golden_doc_uuids": ["96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b"], "golden_chunk_uuids": [["96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", 77], ["96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", 72], ["96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", 51], ["96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", 26], ["96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", 16], ["96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", 15]], "golden_documents": [{"uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "content": "//! The display subsystem including window management, font rasterization, and\n//! 
GPU drawing.\n\nuse std::cmp;\nuse std::fmt::{self, Formatter};\nuse std::mem::{self, ManuallyDrop};\nuse std::num::NonZeroU32;\nuse std::ops::{Deref, DerefMut};\nuse std::time::{Duration, Instant};\n\nuse glutin::context::{NotCurrentContext, PossiblyCurrentContext};\nuse glutin::prelude::*;\nuse glutin::surface::{Surface, SwapInterval, WindowSurface};\n\nuse log::{debug, info};\nuse parking_lot::MutexGuard;\nuse raw_window_handle::RawWindowHandle;\nuse serde::{Deserialize, Serialize};\nuse winit::dpi::PhysicalSize;\nuse winit::keyboard::ModifiersState;\nuse winit::window::CursorIcon;\n\nuse crossfont::{Rasterize, Rasterizer, Size as FontSize};\nuse unicode_width::UnicodeWidthChar;\n\nuse alacritty_terminal::event::{EventListener, OnResize, WindowSize};\nuse alacritty_terminal::grid::Dimensions as TermDimensions;\nuse alacritty_terminal::index::{Column, Direction, Line, Point};\nuse alacritty_terminal::selection::Selection;\nuse alacritty_terminal::term::cell::Flags;\nuse alacritty_terminal::term::{\n self, point_to_viewport, LineDamageBounds, Term, TermDamage, TermMode, MIN_COLUMNS,\n MIN_SCREEN_LINES,\n};\nuse alacritty_terminal::vte::ansi::{CursorShape, NamedColor};\n\nuse crate::config::font::Font;\nuse crate::config::window::Dimensions;\n#[cfg(not(windows))]\nuse crate::config::window::StartupMode;\nuse crate::config::UiConfig;\nuse crate::display::bell::VisualBell;\nuse crate::display::color::{List, Rgb};\nuse crate::display::content::{RenderableContent, RenderableCursor};\nuse crate::display::cursor::IntoRects;\nuse crate::display::damage::{damage_y_to_viewport_y, DamageTracker};\nuse crate::display::hint::{HintMatch, HintState};\nuse crate::display::meter::Meter;\nuse crate::display::window::Window;\nuse crate::event::{Event, EventType, Mouse, SearchState};\nuse crate::message_bar::{MessageBuffer, MessageType};\nuse crate::renderer::rects::{RenderLine, RenderLines, RenderRect};\nuse crate::renderer::{self, GlyphCache, Renderer};\nuse crate::scheduler::{Scheduler, TimerId, Topic};\nuse crate::string::{ShortenDirection, StrShortener};\n\npub mod color;\npub mod content;\npub mod cursor;\npub mod hint;\npub mod window;\n\nmod bell;\nmod damage;\nmod meter;\n\n/// Label for the forward terminal search bar.\nconst FORWARD_SEARCH_LABEL: &str = \"Search: \";\n\n/// Label for the backward terminal search bar.\nconst BACKWARD_SEARCH_LABEL: &str = \"Backward Search: \";\n\n/// The character used to shorten the visible text like uri preview or search regex.\nconst SHORTENER: char = '…';\n\n/// Color which is used to highlight damaged rects when debugging.\nconst DAMAGE_RECT_COLOR: Rgb = Rgb::new(255, 0, 255);\n\n#[derive(Debug)]\npub enum Error {\n /// Error with window management.\n Window(window::Error),\n\n /// Error dealing with fonts.\n Font(crossfont::Error),\n\n /// Error in renderer.\n Render(renderer::Error),\n\n /// Error during context operations.\n Context(glutin::error::Error),\n}\n\nimpl std::error::Error for Error {\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n match self {\n Error::Window(err) => err.source(),\n Error::Font(err) => err.source(),\n Error::Render(err) => err.source(),\n Error::Context(err) => err.source(),\n }\n }\n}\n\nimpl fmt::Display for Error {\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n match self {\n Error::Window(err) => err.fmt(f),\n Error::Font(err) => err.fmt(f),\n Error::Render(err) => err.fmt(f),\n Error::Context(err) => err.fmt(f),\n }\n }\n}\n\nimpl From for Error {\n fn from(val: window::Error) -> Self {\n 
Error::Window(val)\n }\n}\n\nimpl From for Error {\n fn from(val: crossfont::Error) -> Self {\n Error::Font(val)\n }\n}\n\nimpl From for Error {\n fn from(val: renderer::Error) -> Self {\n Error::Render(val)\n }\n}\n\nimpl From for Error {\n fn from(val: glutin::error::Error) -> Self {\n Error::Context(val)\n }\n}\n\n/// Terminal size info.\n#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]\npub struct SizeInfo {\n /// Terminal window width.\n width: T,\n\n /// Terminal window height.\n height: T,\n\n /// Width of individual cell.\n cell_width: T,\n\n /// Height of individual cell.\n cell_height: T,\n\n /// Horizontal window padding.\n padding_x: T,\n\n /// Vertical window padding.\n padding_y: T,\n\n /// Number of lines in the viewport.\n screen_lines: usize,\n\n /// Number of columns in the viewport.\n columns: usize,\n}\n\nimpl From> for SizeInfo {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n width: size_info.width as u32,\n height: size_info.height as u32,\n cell_width: size_info.cell_width as u32,\n cell_height: size_info.cell_height as u32,\n padding_x: size_info.padding_x as u32,\n padding_y: size_info.padding_y as u32,\n screen_lines: size_info.screen_lines,\n columns: size_info.screen_lines,\n }\n }\n}\n\nimpl From> for WindowSize {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n num_cols: size_info.columns() as u16,\n num_lines: size_info.screen_lines() as u16,\n cell_width: size_info.cell_width() as u16,\n cell_height: size_info.cell_height() as u16,\n }\n }\n}\n\nimpl SizeInfo {\n #[inline]\n pub fn width(&self) -> T {\n self.width\n }\n\n #[inline]\n pub fn height(&self) -> T {\n self.height\n }\n\n #[inline]\n pub fn cell_width(&self) -> T {\n self.cell_width\n }\n\n #[inline]\n pub fn cell_height(&self) -> T {\n self.cell_height\n }\n\n #[inline]\n pub fn padding_x(&self) -> T {\n self.padding_x\n }\n\n #[inline]\n pub fn padding_y(&self) -> T {\n self.padding_y\n }\n}\n\nimpl SizeInfo {\n #[allow(clippy::too_many_arguments)]\n pub fn new(\n width: f32,\n height: f32,\n cell_width: f32,\n cell_height: f32,\n mut padding_x: f32,\n mut padding_y: f32,\n dynamic_padding: bool,\n ) -> SizeInfo {\n if dynamic_padding {\n padding_x = Self::dynamic_padding(padding_x.floor(), width, cell_width);\n padding_y = Self::dynamic_padding(padding_y.floor(), height, cell_height);\n }\n\n let lines = (height - 2. * padding_y) / cell_height;\n let screen_lines = cmp::max(lines as usize, MIN_SCREEN_LINES);\n\n let columns = (width - 2. * padding_x) / cell_width;\n let columns = cmp::max(columns as usize, MIN_COLUMNS);\n\n SizeInfo {\n width,\n height,\n cell_width,\n cell_height,\n padding_x: padding_x.floor(),\n padding_y: padding_y.floor(),\n screen_lines,\n columns,\n }\n }\n\n #[inline]\n pub fn reserve_lines(&mut self, count: usize) {\n self.screen_lines = cmp::max(self.screen_lines.saturating_sub(count), MIN_SCREEN_LINES);\n }\n\n /// Check if coordinates are inside the terminal grid.\n ///\n /// The padding, message bar or search are not counted as part of the grid.\n #[inline]\n pub fn contains_point(&self, x: usize, y: usize) -> bool {\n x <= (self.padding_x + self.columns as f32 * self.cell_width) as usize\n && x > self.padding_x as usize\n && y <= (self.padding_y + self.screen_lines as f32 * self.cell_height) as usize\n && y > self.padding_y as usize\n }\n\n /// Calculate padding to spread it evenly around the terminal content.\n #[inline]\n fn dynamic_padding(padding: f32, dimension: f32, cell_dimension: f32) -> f32 {\n padding + ((dimension - 2. 
* padding) % cell_dimension) / 2.\n }\n}\n\nimpl TermDimensions for SizeInfo {\n #[inline]\n fn columns(&self) -> usize {\n self.columns\n }\n\n #[inline]\n fn screen_lines(&self) -> usize {\n self.screen_lines\n }\n\n #[inline]\n fn total_lines(&self) -> usize {\n self.screen_lines()\n }\n}\n\n#[derive(Default, Clone, Debug, PartialEq, Eq)]\npub struct DisplayUpdate {\n pub dirty: bool,\n\n dimensions: Option>,\n cursor_dirty: bool,\n font: Option,\n}\n\nimpl DisplayUpdate {\n pub fn dimensions(&self) -> Option> {\n self.dimensions\n }\n\n pub fn font(&self) -> Option<&Font> {\n self.font.as_ref()\n }\n\n pub fn cursor_dirty(&self) -> bool {\n self.cursor_dirty\n }\n\n pub fn set_dimensions(&mut self, dimensions: PhysicalSize) {\n self.dimensions = Some(dimensions);\n self.dirty = true;\n }\n\n pub fn set_font(&mut self, font: Font) {\n self.font = Some(font);\n self.dirty = true;\n }\n\n pub fn set_cursor_dirty(&mut self) {\n self.cursor_dirty = true;\n self.dirty = true;\n }\n}\n\n/// The display wraps a window, font rasterizer, and GPU renderer.\npub struct Display {\n pub window: Window,\n\n pub size_info: SizeInfo,\n\n /// Hint highlighted by the mouse.\n pub highlighted_hint: Option,\n\n /// Hint highlighted by the vi mode cursor.\n pub vi_highlighted_hint: Option,\n\n pub raw_window_handle: RawWindowHandle,\n\n /// UI cursor visibility for blinking.\n pub cursor_hidden: bool,\n\n pub visual_bell: VisualBell,\n\n /// Mapped RGB values for each terminal color.\n pub colors: List,\n\n /// State of the keyboard hints.\n pub hint_state: HintState,\n\n /// Unprocessed display updates.\n pub pending_update: DisplayUpdate,\n\n /// The renderer update that takes place only once before the actual rendering.\n pub pending_renderer_update: Option,\n\n /// The ime on the given display.\n pub ime: Ime,\n\n /// The state of the timer for frame scheduling.\n pub frame_timer: FrameTimer,\n\n /// Damage tracker for the given display.\n pub damage_tracker: DamageTracker,\n\n /// Font size used by the window.\n pub font_size: FontSize,\n\n // Mouse point position when highlighting hints.\n hint_mouse_point: Option,\n\n renderer: ManuallyDrop,\n\n surface: ManuallyDrop>,\n\n context: ManuallyDrop>,\n\n glyph_cache: GlyphCache,\n meter: Meter,\n}\n\nimpl Display {\n pub fn new(\n window: Window,\n gl_context: NotCurrentContext,\n config: &UiConfig,\n _tabbed: bool,\n ) -> Result {\n let raw_window_handle = window.raw_window_handle();\n\n let scale_factor = window.scale_factor as f32;\n let rasterizer = Rasterizer::new()?;\n\n let font_size = config.font.size().scale(scale_factor);\n debug!(\"Loading \\\"{}\\\" font\", &config.font.normal().family);\n let font = config.font.clone().with_size(font_size);\n let mut glyph_cache = GlyphCache::new(rasterizer, &font)?;\n\n let metrics = glyph_cache.font_metrics();\n let (cell_width, cell_height) = compute_cell_size(config, &metrics);\n\n // Resize the window to account for the user configured size.\n if let Some(dimensions) = config.window.dimensions() {\n let size = window_size(config, dimensions, cell_width, cell_height, scale_factor);\n window.request_inner_size(size);\n }\n\n // Create the GL surface to draw into.\n let surface = renderer::platform::create_gl_surface(\n &gl_context,\n window.inner_size(),\n window.raw_window_handle(),\n )?;\n\n // Make the context current.\n let context = gl_context.make_current(&surface)?;\n\n // Create renderer.\n let mut renderer = Renderer::new(&context, config.debug.renderer)?;\n\n // Load font common glyphs to 
accelerate rendering.\n debug!(\"Filling glyph cache with common glyphs\");\n renderer.with_loader(|mut api| {\n glyph_cache.reset_glyph_cache(&mut api);\n });\n\n let padding = config.window.padding(window.scale_factor as f32);\n let viewport_size = window.inner_size();\n\n // Create new size with at least one column and row.\n let size_info = SizeInfo::new(\n viewport_size.width as f32,\n viewport_size.height as f32,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding && config.window.dimensions().is_none(),\n );\n\n info!(\"Cell size: {} x {}\", cell_width, cell_height);\n info!(\"Padding: {} x {}\", size_info.padding_x(), size_info.padding_y());\n info!(\"Width: {}, Height: {}\", size_info.width(), size_info.height());\n\n // Update OpenGL projection.\n renderer.resize(&size_info);\n\n // Clear screen.\n let background_color = config.colors.primary.background;\n renderer.clear(background_color, config.window_opacity());\n\n // Disable shadows for transparent windows on macOS.\n #[cfg(target_os = \"macos\")]\n window.set_has_shadow(config.window_opacity() >= 1.0);\n\n let is_wayland = matches!(raw_window_handle, RawWindowHandle::Wayland(_));\n\n // On Wayland we can safely ignore this call, since the window isn't visible until you\n // actually draw something into it and commit those changes.\n if !is_wayland {\n surface.swap_buffers(&context).expect(\"failed to swap buffers.\");\n renderer.finish();\n }\n\n // Set resize increments for the newly created window.\n if config.window.resize_increments {\n window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n window.set_visible(true);\n\n #[allow(clippy::single_match)]\n #[cfg(not(windows))]\n if !_tabbed {\n match config.window.startup_mode {\n #[cfg(target_os = \"macos\")]\n StartupMode::SimpleFullscreen => window.set_simple_fullscreen(true),\n StartupMode::Maximized if !is_wayland => window.set_maximized(true),\n _ => (),\n }\n }\n\n let hint_state = HintState::new(config.hints.alphabet());\n\n let mut damage_tracker = DamageTracker::new(size_info.screen_lines(), size_info.columns());\n damage_tracker.debug = config.debug.highlight_damage;\n\n // Disable vsync.\n if let Err(err) = surface.set_swap_interval(&context, SwapInterval::DontWait) {\n info!(\"Failed to disable vsync: {}\", err);\n }\n\n Ok(Self {\n context: ManuallyDrop::new(Replaceable::new(context)),\n visual_bell: VisualBell::from(&config.bell),\n renderer: ManuallyDrop::new(renderer),\n surface: ManuallyDrop::new(surface),\n colors: List::from(&config.colors),\n frame_timer: FrameTimer::new(),\n raw_window_handle,\n damage_tracker,\n glyph_cache,\n hint_state,\n size_info,\n font_size,\n window,\n pending_renderer_update: Default::default(),\n vi_highlighted_hint: Default::default(),\n highlighted_hint: Default::default(),\n hint_mouse_point: Default::default(),\n pending_update: Default::default(),\n cursor_hidden: Default::default(),\n meter: Default::default(),\n ime: Default::default(),\n })\n }\n\n #[inline]\n pub fn gl_context(&self) -> &PossiblyCurrentContext {\n self.context.get()\n }\n\n pub fn make_not_current(&mut self) {\n if self.context.get().is_current() {\n self.context.replace_with(|context| {\n context\n .make_not_current()\n .expect(\"failed to disable context\")\n .treat_as_possibly_current()\n });\n }\n }\n\n pub fn make_current(&self) {\n if !self.context.get().is_current() {\n self.context.make_current(&self.surface).expect(\"failed to make context current\")\n }\n }\n\n fn 
swap_buffers(&self) {\n #[allow(clippy::single_match)]\n let res = match (self.surface.deref(), &self.context.get()) {\n #[cfg(not(any(target_os = \"macos\", windows)))]\n (Surface::Egl(surface), PossiblyCurrentContext::Egl(context))\n if matches!(self.raw_window_handle, RawWindowHandle::Wayland(_))\n && !self.damage_tracker.debug =>\n {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n surface.swap_buffers_with_damage(context, &damage)\n },\n (surface, context) => surface.swap_buffers(context),\n };\n if let Err(err) = res {\n debug!(\"error calling swap_buffers: {}\", err);\n }\n }\n\n /// Update font size and cell dimensions.\n ///\n /// This will return a tuple of the cell width and height.\n fn update_font_size(\n glyph_cache: &mut GlyphCache,\n config: &UiConfig,\n font: &Font,\n ) -> (f32, f32) {\n let _ = glyph_cache.update_font_size(font);\n\n // Compute new cell sizes.\n compute_cell_size(config, &glyph_cache.font_metrics())\n }\n\n /// Reset glyph cache.\n fn reset_glyph_cache(&mut self) {\n let cache = &mut self.glyph_cache;\n self.renderer.with_loader(|mut api| {\n cache.reset_glyph_cache(&mut api);\n });\n }\n\n // XXX: this function must not call to any `OpenGL` related tasks. Renderer updates are\n // performed in [`Self::process_renderer_update`] right before drawing.\n //\n /// Process update events.\n pub fn handle_update(\n &mut self,\n terminal: &mut Term,\n pty_resize_handle: &mut dyn OnResize,\n message_buffer: &MessageBuffer,\n search_state: &mut SearchState,\n config: &UiConfig,\n ) where\n T: EventListener,\n {\n let pending_update = mem::take(&mut self.pending_update);\n\n let (mut cell_width, mut cell_height) =\n (self.size_info.cell_width(), self.size_info.cell_height());\n\n if pending_update.font().is_some() || pending_update.cursor_dirty() {\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.clear_font_cache = true\n }\n\n // Update font size and cell dimensions.\n if let Some(font) = pending_update.font() {\n let cell_dimensions = Self::update_font_size(&mut self.glyph_cache, config, font);\n cell_width = cell_dimensions.0;\n cell_height = cell_dimensions.1;\n\n info!(\"Cell size: {} x {}\", cell_width, cell_height);\n\n // Mark entire terminal as damaged since glyph size could change without cell size\n // changes.\n self.damage_tracker.frame().mark_fully_damaged();\n }\n\n let (mut width, mut height) = (self.size_info.width(), self.size_info.height());\n if let Some(dimensions) = pending_update.dimensions() {\n width = dimensions.width as f32;\n height = dimensions.height as f32;\n }\n\n let padding = config.window.padding(self.window.scale_factor as f32);\n\n let mut new_size = SizeInfo::new(\n width,\n height,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding,\n );\n\n // Update number of column/lines in the viewport.\n let search_active = search_state.history_index.is_some();\n let message_bar_lines = message_buffer.message().map_or(0, |m| m.text(&new_size).len());\n let search_lines = usize::from(search_active);\n new_size.reserve_lines(message_bar_lines + search_lines);\n\n // Update resize increments.\n if config.window.resize_increments {\n self.window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n // Resize when terminal when its dimensions have changed.\n if self.size_info.screen_lines() != new_size.screen_lines\n || self.size_info.columns() != new_size.columns()\n {\n // Resize PTY.\n 
pty_resize_handle.on_resize(new_size.into());\n\n // Resize terminal.\n terminal.resize(new_size);\n\n // Resize damage tracking.\n self.damage_tracker.resize(new_size.screen_lines(), new_size.columns());\n }\n\n // Check if dimensions have changed.\n if new_size != self.size_info {\n // Queue renderer update.\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.resize = true;\n\n // Clear focused search match.\n search_state.clear_focused_match();\n }\n self.size_info = new_size;\n }\n\n // NOTE: Renderer updates are split off, since platforms like Wayland require resize and other\n // OpenGL operations to be performed right before rendering. Otherwise they could lock the\n // back buffer and render with the previous state. This also solves flickering during resizes.\n //\n /// Update the state of the renderer.\n pub fn process_renderer_update(&mut self) {\n let renderer_update = match self.pending_renderer_update.take() {\n Some(renderer_update) => renderer_update,\n _ => return,\n };\n\n // Resize renderer.\n if renderer_update.resize {\n let width = NonZeroU32::new(self.size_info.width() as u32).unwrap();\n let height = NonZeroU32::new(self.size_info.height() as u32).unwrap();\n self.surface.resize(&self.context, width, height);\n }\n\n // Ensure we're modifying the correct OpenGL context.\n self.make_current();\n\n if renderer_update.clear_font_cache {\n self.reset_glyph_cache();\n }\n\n self.renderer.resize(&self.size_info);\n\n info!(\"Padding: {} x {}\", self.size_info.padding_x(), self.size_info.padding_y());\n info!(\"Width: {}, Height: {}\", self.size_info.width(), self.size_info.height());\n }\n\n /// Draw the screen.\n ///\n /// A reference to Term whose state is being drawn must be provided.\n ///\n /// This call may block if vsync is enabled.\n pub fn draw(\n &mut self,\n mut terminal: MutexGuard<'_, Term>,\n scheduler: &mut Scheduler,\n message_buffer: &MessageBuffer,\n config: &UiConfig,\n search_state: &mut SearchState,\n ) {\n // Collect renderable content before the terminal is dropped.\n let mut content = RenderableContent::new(config, self, &terminal, search_state);\n let mut grid_cells = Vec::new();\n for cell in &mut content {\n grid_cells.push(cell);\n }\n let selection_range = content.selection_range();\n let foreground_color = content.color(NamedColor::Foreground as usize);\n let background_color = content.color(NamedColor::Background as usize);\n let display_offset = content.display_offset();\n let cursor = content.cursor();\n\n let cursor_point = terminal.grid().cursor.point;\n let total_lines = terminal.grid().total_lines();\n let metrics = self.glyph_cache.font_metrics();\n let size_info = self.size_info;\n\n let vi_mode = terminal.mode().contains(TermMode::VI);\n let vi_cursor_point = if vi_mode { Some(terminal.vi_mode_cursor.point) } else { None };\n\n // Add damage from the terminal.\n if self.collect_damage() {\n match terminal.damage() {\n TermDamage::Full => self.damage_tracker.frame().mark_fully_damaged(),\n TermDamage::Partial(damaged_lines) => {\n for damage in damaged_lines {\n self.damage_tracker.frame().damage_line(damage);\n }\n },\n }\n terminal.reset_damage();\n }\n\n // Drop terminal as early as possible to free lock.\n drop(terminal);\n\n // Add damage from alacritty's UI elements overlapping terminal.\n if self.collect_damage() {\n let requires_full_damage = self.visual_bell.intensity() != 0.\n || self.hint_state.active()\n || search_state.regex().is_some();\n\n if requires_full_damage {\n 
self.damage_tracker.frame().mark_fully_damaged();\n self.damage_tracker.next_frame().mark_fully_damaged();\n }\n\n let vi_cursor_viewport_point =\n vi_cursor_point.and_then(|cursor| point_to_viewport(display_offset, cursor));\n\n self.damage_tracker.damage_vi_cursor(vi_cursor_viewport_point);\n self.damage_tracker.damage_selection(selection_range, display_offset);\n }\n\n // Make sure this window's OpenGL context is active.\n self.make_current();\n\n self.renderer.clear(background_color, config.window_opacity());\n let mut lines = RenderLines::new();\n\n // Optimize loop hint comparator.\n let has_highlighted_hint =\n self.highlighted_hint.is_some() || self.vi_highlighted_hint.is_some();\n\n // Draw grid.\n {\n let _sampler = self.meter.sampler();\n\n // Ensure macOS hasn't reset our viewport.\n #[cfg(target_os = \"macos\")]\n self.renderer.set_viewport(&size_info);\n\n let glyph_cache = &mut self.glyph_cache;\n let highlighted_hint = &self.highlighted_hint;\n let vi_highlighted_hint = &self.vi_highlighted_hint;\n let damage_tracker = &mut self.damage_tracker;\n\n self.renderer.draw_cells(\n &size_info,\n glyph_cache,\n grid_cells.into_iter().map(|mut cell| {\n // Underline hints hovered by mouse or vi mode cursor.\n let point = term::viewport_to_point(display_offset, cell.point);\n\n if has_highlighted_hint {\n let hyperlink =\n cell.extra.as_ref().and_then(|extra| extra.hyperlink.as_ref());\n if highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n || vi_highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n {\n cell.flags.insert(Flags::UNDERLINE);\n // Damage hints for the current and next frames.\n damage_tracker.frame().damage_point(cell.point);\n damage_tracker.next_frame().damage_point(cell.point);\n }\n }\n\n // Update underline/strikeout.\n lines.update(&cell);\n\n cell\n }),\n );\n }\n\n let mut rects = lines.rects(&metrics, &size_info);\n\n if let Some(vi_cursor_point) = vi_cursor_point {\n // Indicate vi mode by showing the cursor's position in the top right corner.\n let line = (-vi_cursor_point.line.0 + size_info.bottommost_line().0) as usize;\n let obstructed_column = Some(vi_cursor_point)\n .filter(|point| point.line == -(display_offset as i32))\n .map(|point| point.column);\n self.draw_line_indicator(config, total_lines, obstructed_column, line);\n } else if search_state.regex().is_some() {\n // Show current display offset in vi-less search to indicate match position.\n self.draw_line_indicator(config, total_lines, None, display_offset);\n };\n\n // Draw cursor.\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n\n // Push visual bell after url/underline/strikeout rects.\n let visual_bell_intensity = self.visual_bell.intensity();\n if visual_bell_intensity != 0. 
{\n let visual_bell_rect = RenderRect::new(\n 0.,\n 0.,\n size_info.width(),\n size_info.height(),\n config.bell.color,\n visual_bell_intensity as f32,\n );\n rects.push(visual_bell_rect);\n }\n\n // Handle IME positioning and search bar rendering.\n let ime_position = match search_state.regex() {\n Some(regex) => {\n let search_label = match search_state.direction() {\n Direction::Right => FORWARD_SEARCH_LABEL,\n Direction::Left => BACKWARD_SEARCH_LABEL,\n };\n\n let search_text = Self::format_search(regex, search_label, size_info.columns());\n\n // Render the search bar.\n self.draw_search(config, &search_text);\n\n // Draw search bar cursor.\n let line = size_info.screen_lines();\n let column = Column(search_text.chars().count() - 1);\n\n // Add cursor to search bar if IME is not active.\n if self.ime.preedit().is_none() {\n let fg = config.colors.footer_bar_foreground();\n let shape = CursorShape::Underline;\n let cursor = RenderableCursor::new(Point::new(line, column), shape, fg, false);\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n }\n\n Some(Point::new(line, column))\n },\n None => {\n let num_lines = self.size_info.screen_lines();\n term::point_to_viewport(display_offset, cursor_point)\n .filter(|point| point.line < num_lines)\n },\n };\n\n // Handle IME.\n if self.ime.is_enabled() {\n if let Some(point) = ime_position {\n let (fg, bg) = if search_state.regex().is_some() {\n (config.colors.footer_bar_foreground(), config.colors.footer_bar_background())\n } else {\n (foreground_color, background_color)\n };\n\n self.draw_ime_preview(point, fg, bg, &mut rects, config);\n }\n }\n\n if let Some(message) = message_buffer.message() {\n let search_offset = usize::from(search_state.regex().is_some());\n let text = message.text(&size_info);\n\n // Create a new rectangle for the background.\n let start_line = size_info.screen_lines() + search_offset;\n let y = size_info.cell_height().mul_add(start_line as f32, size_info.padding_y());\n\n let bg = match message.ty() {\n MessageType::Error => config.colors.normal.red,\n MessageType::Warning => config.colors.normal.yellow,\n };\n\n let x = 0;\n let width = size_info.width() as i32;\n let height = (size_info.height() - y) as i32;\n let message_bar_rect =\n RenderRect::new(x as f32, y, width as f32, height as f32, bg, 1.);\n\n // Push message_bar in the end, so it'll be above all other content.\n rects.push(message_bar_rect);\n\n // Always damage message bar, since it could have messages of the same size in it.\n self.damage_tracker.frame().add_viewport_rect(&size_info, x, y as i32, width, height);\n\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n\n // Relay messages to the user.\n let glyph_cache = &mut self.glyph_cache;\n let fg = config.colors.primary.background;\n for (i, message_text) in text.iter().enumerate() {\n let point = Point::new(start_line + i, Column(0));\n self.renderer.draw_string(\n point,\n fg,\n bg,\n message_text.chars(),\n &size_info,\n glyph_cache,\n );\n }\n } else {\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n }\n\n self.draw_render_timer(config);\n\n // Draw hyperlink uri preview.\n if has_highlighted_hint {\n let cursor_point = vi_cursor_point.or(Some(cursor_point));\n self.draw_hyperlink_preview(config, cursor_point, display_offset);\n }\n\n // Notify winit that we're about to present.\n self.window.pre_present_notify();\n\n // Highlight damage for debugging.\n if self.damage_tracker.debug {\n let damage = 
self.damage_tracker.shape_frame_damage(self.size_info.into());\n let mut rects = Vec::with_capacity(damage.len());\n self.highlight_damage(&mut rects);\n self.renderer.draw_rects(&self.size_info, &metrics, rects);\n }\n\n // Clearing debug highlights from the previous frame requires full redraw.\n self.swap_buffers();\n\n if matches!(self.raw_window_handle, RawWindowHandle::Xcb(_) | RawWindowHandle::Xlib(_)) {\n // On X11 `swap_buffers` does not block for vsync. However the next OpenGl command\n // will block to synchronize (this is `glClear` in Alacritty), which causes a\n // permanent one frame delay.\n self.renderer.finish();\n }\n\n // XXX: Request the new frame after swapping buffers, so the\n // time to finish OpenGL operations is accounted for in the timeout.\n if !matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) {\n self.request_frame(scheduler);\n }\n\n self.damage_tracker.swap_damage();\n }\n\n /// Update to a new configuration.\n pub fn update_config(&mut self, config: &UiConfig) {\n self.damage_tracker.debug = config.debug.highlight_damage;\n self.visual_bell.update_config(&config.bell);\n self.colors = List::from(&config.colors);\n }\n\n /// Update the mouse/vi mode cursor hint highlighting.\n ///\n /// This will return whether the highlighted hints changed.\n pub fn update_highlighted_hints(\n &mut self,\n term: &Term,\n config: &UiConfig,\n mouse: &Mouse,\n modifiers: ModifiersState,\n ) -> bool {\n // Update vi mode cursor hint.\n let vi_highlighted_hint = if term.mode().contains(TermMode::VI) {\n let mods = ModifiersState::all();\n let point = term.vi_mode_cursor.point;\n hint::highlighted_at(term, config, point, mods)\n } else {\n None\n };\n let mut dirty = vi_highlighted_hint != self.vi_highlighted_hint;\n self.vi_highlighted_hint = vi_highlighted_hint;\n\n // Abort if mouse highlighting conditions are not met.\n if !mouse.inside_text_area || !term.selection.as_ref().map_or(true, Selection::is_empty) {\n dirty |= self.highlighted_hint.is_some();\n self.highlighted_hint = None;\n return dirty;\n }\n\n // Find highlighted hint at mouse position.\n let point = mouse.point(&self.size_info, term.grid().display_offset());\n let highlighted_hint = hint::highlighted_at(term, config, point, modifiers);\n\n // Update cursor shape.\n if highlighted_hint.is_some() {\n // If mouse changed the line, we should update the hyperlink preview, since the\n // highlighted hint could be disrupted by the old preview.\n dirty = self.hint_mouse_point.map_or(false, |p| p.line != point.line);\n self.hint_mouse_point = Some(point);\n self.window.set_mouse_cursor(CursorIcon::Pointer);\n } else if self.highlighted_hint.is_some() {\n self.hint_mouse_point = None;\n if term.mode().intersects(TermMode::MOUSE_MODE) && !term.mode().contains(TermMode::VI) {\n self.window.set_mouse_cursor(CursorIcon::Default);\n } else {\n self.window.set_mouse_cursor(CursorIcon::Text);\n }\n }\n\n dirty |= self.highlighted_hint != highlighted_hint;\n self.highlighted_hint = highlighted_hint;\n\n dirty\n }\n\n #[inline(never)]\n fn draw_ime_preview(\n &mut self,\n point: Point,\n fg: Rgb,\n bg: Rgb,\n rects: &mut Vec,\n config: &UiConfig,\n ) {\n let preedit = match self.ime.preedit() {\n Some(preedit) => preedit,\n None => {\n // In case we don't have preedit, just set the popup point.\n self.window.update_ime_position(point, &self.size_info);\n return;\n },\n };\n\n let num_cols = self.size_info.columns();\n\n // Get the visible preedit.\n let visible_text: String = match (preedit.cursor_byte_offset, 
preedit.cursor_end_offset) {\n (Some(byte_offset), Some(end_offset)) if end_offset > num_cols => StrShortener::new(\n &preedit.text[byte_offset..],\n num_cols,\n ShortenDirection::Right,\n Some(SHORTENER),\n ),\n _ => {\n StrShortener::new(&preedit.text, num_cols, ShortenDirection::Left, Some(SHORTENER))\n },\n }\n .collect();\n\n let visible_len = visible_text.chars().count();\n\n let end = cmp::min(point.column.0 + visible_len, num_cols);\n let start = end.saturating_sub(visible_len);\n\n let start = Point::new(point.line, Column(start));\n let end = Point::new(point.line, Column(end - 1));\n\n let glyph_cache = &mut self.glyph_cache;\n let metrics = glyph_cache.font_metrics();\n\n self.renderer.draw_string(\n start,\n fg,\n bg,\n visible_text.chars(),\n &self.size_info,\n glyph_cache,\n );\n\n // Damage preedit inside the terminal viewport.\n if self.collect_damage() && point.line < self.size_info.screen_lines() {\n let damage = LineDamageBounds::new(start.line, 0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n // Add underline for preedit text.\n let underline = RenderLine { start, end, color: fg };\n rects.extend(underline.rects(Flags::UNDERLINE, &metrics, &self.size_info));\n\n let ime_popup_point = match preedit.cursor_end_offset {\n Some(cursor_end_offset) if cursor_end_offset != 0 => {\n let is_wide = preedit.text[preedit.cursor_byte_offset.unwrap_or_default()..]\n .chars()\n .next()\n .map(|ch| ch.width() == Some(2))\n .unwrap_or_default();\n\n let cursor_column = Column(\n (end.column.0 as isize - cursor_end_offset as isize + 1).max(0) as usize,\n );\n let cursor_point = Point::new(point.line, cursor_column);\n let cursor =\n RenderableCursor::new(cursor_point, CursorShape::HollowBlock, fg, is_wide);\n rects.extend(cursor.rects(&self.size_info, config.cursor.thickness()));\n cursor_point\n },\n _ => end,\n };\n\n self.window.update_ime_position(ime_popup_point, &self.size_info);\n }\n\n /// Format search regex to account for the cursor and fullwidth characters.\n fn format_search(search_regex: &str, search_label: &str, max_width: usize) -> String {\n let label_len = search_label.len();\n\n // Skip `search_regex` formatting if only label is visible.\n if label_len > max_width {\n return search_label[..max_width].to_owned();\n }\n\n // The search string consists of `search_label` + `search_regex` + `cursor`.\n let mut bar_text = String::from(search_label);\n bar_text.extend(StrShortener::new(\n search_regex,\n max_width.wrapping_sub(label_len + 1),\n ShortenDirection::Left,\n Some(SHORTENER),\n ));\n\n // Add place for cursor.\n bar_text.push(' ');\n\n bar_text\n }\n\n /// Draw preview for the currently highlighted `Hyperlink`.\n #[inline(never)]\n fn draw_hyperlink_preview(\n &mut self,\n config: &UiConfig,\n cursor_point: Option,\n display_offset: usize,\n ) {\n let num_cols = self.size_info.columns();\n let uris: Vec<_> = self\n .highlighted_hint\n .iter()\n .chain(&self.vi_highlighted_hint)\n .filter_map(|hint| hint.hyperlink().map(|hyperlink| hyperlink.uri()))\n .map(|uri| StrShortener::new(uri, num_cols, ShortenDirection::Right, Some(SHORTENER)))\n .collect();\n\n if uris.is_empty() {\n return;\n }\n\n // The maximum amount of protected lines including the ones we'll show preview on.\n let max_protected_lines = uris.len() * 2;\n\n // Lines we shouldn't show preview on, because it'll obscure the highlighted hint.\n let mut protected_lines = Vec::with_capacity(max_protected_lines);\n if 
self.size_info.screen_lines() > max_protected_lines {\n // Prefer to show preview even when it'll likely obscure the highlighted hint, when\n // there's no place left for it.\n protected_lines.push(self.hint_mouse_point.map(|point| point.line));\n protected_lines.push(cursor_point.map(|point| point.line));\n }\n\n // Find the line in viewport we can draw preview on without obscuring protected lines.\n let viewport_bottom = self.size_info.bottommost_line() - Line(display_offset as i32);\n let viewport_top = viewport_bottom - (self.size_info.screen_lines() - 1);\n let uri_lines = (viewport_top.0..=viewport_bottom.0)\n .rev()\n .map(|line| Some(Line(line)))\n .filter_map(|line| {\n if protected_lines.contains(&line) {\n None\n } else {\n protected_lines.push(line);\n line\n }\n })\n .take(uris.len())\n .flat_map(|line| term::point_to_viewport(display_offset, Point::new(line, Column(0))));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n for (uri, point) in uris.into_iter().zip(uri_lines) {\n // Damage the uri preview.\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n\n // Damage the uri preview for the next frame as well.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n self.renderer.draw_string(point, fg, bg, uri, &self.size_info, &mut self.glyph_cache);\n }\n }\n\n /// Draw current search regex.\n #[inline(never)]\n fn draw_search(&mut self, config: &UiConfig, text: &str) {\n // Assure text length is at least num_cols.\n let num_cols = self.size_info.columns();\n let text = format!(\"{:<1$}\", text, num_cols);\n\n let point = Point::new(self.size_info.screen_lines(), Column(0));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n\n self.renderer.draw_string(\n point,\n fg,\n bg,\n text.chars(),\n &self.size_info,\n &mut self.glyph_cache,\n );\n }\n\n /// Draw render timer.\n #[inline(never)]\n fn draw_render_timer(&mut self, config: &UiConfig) {\n if !config.debug.render_timer {\n return;\n }\n\n let timing = format!(\"{:.3} usec\", self.meter.average());\n let point = Point::new(self.size_info.screen_lines().saturating_sub(2), Column(0));\n let fg = config.colors.primary.background;\n let bg = config.colors.normal.red;\n\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, timing.len());\n self.damage_tracker.frame().damage_line(damage);\n // Damage the render timer for the next frame.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, timing.chars(), &self.size_info, glyph_cache);\n }\n\n /// Draw an indicator for the position of a line in history.\n #[inline(never)]\n fn draw_line_indicator(\n &mut self,\n config: &UiConfig,\n total_lines: usize,\n obstructed_column: Option,\n line: usize,\n ) {\n let columns = self.size_info.columns();\n let text = format!(\"[{}/{}]\", line, total_lines - 1);\n let column = Column(self.size_info.columns().saturating_sub(text.len()));\n let point = Point::new(0, column);\n\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, columns - 1);\n self.damage_tracker.frame().damage_line(damage);\n // Damage it on the next frame in case it goes away.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let colors = &config.colors;\n let fg = 
colors.line_indicator.foreground.unwrap_or(colors.primary.background);\n let bg = colors.line_indicator.background.unwrap_or(colors.primary.foreground);\n\n // Do not render anything if it would obscure the vi mode cursor.\n if obstructed_column.map_or(true, |obstructed_column| obstructed_column < column) {\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, text.chars(), &self.size_info, glyph_cache);\n }\n }\n\n /// Returns `true` if damage information should be collected, `false` otherwise.\n #[inline]\n fn collect_damage(&self) -> bool {\n matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) || self.damage_tracker.debug\n }\n\n /// Highlight damaged rects.\n ///\n /// This function is for debug purposes only.\n fn highlight_damage(&self, render_rects: &mut Vec) {\n for damage_rect in &self.damage_tracker.shape_frame_damage(self.size_info.into()) {\n let x = damage_rect.x as f32;\n let height = damage_rect.height as f32;\n let width = damage_rect.width as f32;\n let y = damage_y_to_viewport_y(&self.size_info, damage_rect) as f32;\n let render_rect = RenderRect::new(x, y, width, height, DAMAGE_RECT_COLOR, 0.5);\n\n render_rects.push(render_rect);\n }\n }\n\n /// Request a new frame for a window on Wayland.\n fn request_frame(&mut self, scheduler: &mut Scheduler) {\n // Mark that we've used a frame.\n self.window.has_frame = false;\n\n // Get the display vblank interval.\n let monitor_vblank_interval = 1_000_000.\n / self\n .window\n .current_monitor()\n .and_then(|monitor| monitor.refresh_rate_millihertz())\n .unwrap_or(60_000) as f64;\n\n // Now convert it to micro seconds.\n let monitor_vblank_interval =\n Duration::from_micros((1000. * monitor_vblank_interval) as u64);\n\n let swap_timeout = self.frame_timer.compute_timeout(monitor_vblank_interval);\n\n let window_id = self.window.id();\n let timer_id = TimerId::new(Topic::Frame, window_id);\n let event = Event::new(EventType::Frame, window_id);\n\n scheduler.schedule(event, swap_timeout, false, timer_id);\n }\n}\n\nimpl Drop for Display {\n fn drop(&mut self) {\n // Switch OpenGL context before dropping, otherwise objects (like programs) from other\n // contexts might be deleted when dropping renderer.\n self.make_current();\n unsafe {\n ManuallyDrop::drop(&mut self.renderer);\n ManuallyDrop::drop(&mut self.context);\n ManuallyDrop::drop(&mut self.surface);\n }\n }\n}\n\n/// Input method state.\n#[derive(Debug, Default)]\npub struct Ime {\n /// Whether the IME is enabled.\n enabled: bool,\n\n /// Current IME preedit.\n preedit: Option,\n}\n\nimpl Ime {\n #[inline]\n pub fn set_enabled(&mut self, is_enabled: bool) {\n if is_enabled {\n self.enabled = is_enabled\n } else {\n // Clear state when disabling IME.\n *self = Default::default();\n }\n }\n\n #[inline]\n pub fn is_enabled(&self) -> bool {\n self.enabled\n }\n\n #[inline]\n pub fn set_preedit(&mut self, preedit: Option) {\n self.preedit = preedit;\n }\n\n #[inline]\n pub fn preedit(&self) -> Option<&Preedit> {\n self.preedit.as_ref()\n }\n}\n\n#[derive(Debug, Default, PartialEq, Eq)]\npub struct Preedit {\n /// The preedit text.\n text: String,\n\n /// Byte offset for cursor start into the preedit text.\n ///\n /// `None` means that the cursor is invisible.\n cursor_byte_offset: Option,\n\n /// The cursor offset from the end of the preedit in char width.\n cursor_end_offset: Option,\n}\n\nimpl Preedit {\n pub fn new(text: String, cursor_byte_offset: Option) -> Self {\n let cursor_end_offset = if let Some(byte_offset) = 
cursor_byte_offset {\n // Convert byte offset into char offset.\n let cursor_end_offset =\n text[byte_offset..].chars().fold(0, |acc, ch| acc + ch.width().unwrap_or(1));\n\n Some(cursor_end_offset)\n } else {\n None\n };\n\n Self { text, cursor_byte_offset, cursor_end_offset }\n }\n}\n\n/// Pending renderer updates.\n///\n/// All renderer updates are cached to be applied just before rendering, to avoid platform-specific\n/// rendering issues.\n#[derive(Debug, Default, Copy, Clone)]\npub struct RendererUpdate {\n /// Should resize the window.\n resize: bool,\n\n /// Clear font caches.\n clear_font_cache: bool,\n}\n\n/// Struct for safe in-place replacement.\n///\n/// This struct allows easily replacing struct fields that provide `self -> Self` methods in-place,\n/// without having to deal with constantly unwrapping the underlying [`Option`].\nstruct Replaceable(Option);\n\nimpl Replaceable {\n pub fn new(inner: T) -> Self {\n Self(Some(inner))\n }\n\n /// Replace the contents of the container.\n pub fn replace_with T>(&mut self, f: F) {\n self.0 = self.0.take().map(f);\n }\n\n /// Get immutable access to the wrapped value.\n pub fn get(&self) -> &T {\n self.0.as_ref().unwrap()\n }\n\n /// Get mutable access to the wrapped value.\n pub fn get_mut(&mut self) -> &mut T {\n self.0.as_mut().unwrap()\n }\n}\n\nimpl Deref for Replaceable {\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n self.get()\n }\n}\n\nimpl DerefMut for Replaceable {\n fn deref_mut(&mut self) -> &mut Self::Target {\n self.get_mut()\n }\n}\n\n/// The frame timer state.\npub struct FrameTimer {\n /// Base timestamp used to compute sync points.\n base: Instant,\n\n /// The last timestamp we synced to.\n last_synced_timestamp: Instant,\n\n /// The refresh rate we've used to compute sync timestamps.\n refresh_interval: Duration,\n}\n\nimpl FrameTimer {\n pub fn new() -> Self {\n let now = Instant::now();\n Self { base: now, last_synced_timestamp: now, refresh_interval: Duration::ZERO }\n }\n\n /// Compute the delay that we should use to achieve the target frame\n /// rate.\n pub fn compute_timeout(&mut self, refresh_interval: Duration) -> Duration {\n let now = Instant::now();\n\n // Handle refresh rate change.\n if self.refresh_interval != refresh_interval {\n self.base = now;\n self.last_synced_timestamp = now;\n self.refresh_interval = refresh_interval;\n return refresh_interval;\n }\n\n let next_frame = self.last_synced_timestamp + self.refresh_interval;\n\n if next_frame < now {\n // Redraw immediately if we haven't drawn in over `refresh_interval` microseconds.\n let elapsed_micros = (now - self.base).as_micros() as u64;\n let refresh_micros = self.refresh_interval.as_micros() as u64;\n self.last_synced_timestamp =\n now - Duration::from_micros(elapsed_micros % refresh_micros);\n Duration::ZERO\n } else {\n // Redraw on the next `refresh_interval` clock tick.\n self.last_synced_timestamp = next_frame;\n next_frame - now\n }\n }\n}\n\n/// Calculate the cell dimensions based on font metrics.\n///\n/// This will return a tuple of the cell width and height.\n#[inline]\nfn compute_cell_size(config: &UiConfig, metrics: &crossfont::Metrics) -> (f32, f32) {\n let offset_x = f64::from(config.font.offset.x);\n let offset_y = f64::from(config.font.offset.y);\n (\n (metrics.average_advance + offset_x).floor().max(1.) as f32,\n (metrics.line_height + offset_y).floor().max(1.) 
as f32,\n )\n}\n\n/// Calculate the size of the window given padding, terminal dimensions and cell size.\nfn window_size(\n config: &UiConfig,\n dimensions: Dimensions,\n cell_width: f32,\n cell_height: f32,\n scale_factor: f32,\n) -> PhysicalSize {\n let padding = config.window.padding(scale_factor);\n\n let grid_width = cell_width * dimensions.columns.max(MIN_COLUMNS) as f32;\n let grid_height = cell_height * dimensions.lines.max(MIN_SCREEN_LINES) as f32;\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n PhysicalSize::new(width as u32, height as u32)\n}\n", "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/display/mod.rs", "repo_name": "alacritty/alacritty", "num_chunks": 83, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 0, "content": "//! The display subsystem including window management, font rasterization, and\n//! GPU drawing.\n\nuse std::cmp;\nuse std::fmt::{self, Formatter};\nuse std::mem::{self, ManuallyDrop};\nuse std::num::NonZeroU32;\nuse std::ops::{Deref, DerefMut};\nuse std::time::{Duration, Instant};\n\nuse glutin::context::{NotCurrentContext, PossiblyCurrentContext};\nuse glutin::prelude::*;\nuse glutin::surface::{Surface, SwapInterval, WindowSurface};\n\nuse log::{debug, info};\nuse parking_lot::MutexGuard;\nuse raw_window_handle::RawWindowHandle;\nuse serde::{Deserialize, Serialize};\nuse winit::dpi::PhysicalSize;\nuse winit::keyboard::ModifiersState;\nuse winit::window::CursorIcon;\n\nuse crossfont::{Rasterize, Rasterizer, Size as FontSize};\nuse unicode_width::UnicodeWidthChar;\n\n", "meta": {"hash_id": "484264198542f621aca7f7e0469f3f5fccb23b1db7ac149238eed2ec5f595fd6"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 1, "content": "use alacritty_terminal::event::{EventListener, OnResize, WindowSize};\nuse alacritty_terminal::grid::Dimensions as TermDimensions;\nuse alacritty_terminal::index::{Column, Direction, Line, Point};\nuse alacritty_terminal::selection::Selection;\nuse alacritty_terminal::term::cell::Flags;\nuse alacritty_terminal::term::{\n self, point_to_viewport, LineDamageBounds, Term, TermDamage, TermMode, MIN_COLUMNS,\n MIN_SCREEN_LINES,\n};\nuse alacritty_terminal::vte::ansi::{CursorShape, NamedColor};\n\n", "meta": {"hash_id": "76d9a8e79ad1411705e13167d40249eed4f8d6364b22acd0869bfe12e7b56338"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 2, "content": "use crate::config::font::Font;\nuse crate::config::window::Dimensions;\n#[cfg(not(windows))]\nuse crate::config::window::StartupMode;\nuse crate::config::UiConfig;\nuse crate::display::bell::VisualBell;\nuse crate::display::color::{List, Rgb};\nuse crate::display::content::{RenderableContent, RenderableCursor};\nuse crate::display::cursor::IntoRects;\nuse crate::display::damage::{damage_y_to_viewport_y, DamageTracker};\nuse crate::display::hint::{HintMatch, HintState};\nuse crate::display::meter::Meter;\nuse crate::display::window::Window;\nuse crate::event::{Event, EventType, Mouse, SearchState};\nuse crate::message_bar::{MessageBuffer, MessageType};\nuse crate::renderer::rects::{RenderLine, RenderLines, RenderRect};\nuse crate::renderer::{self, GlyphCache, Renderer};\nuse crate::scheduler::{Scheduler, TimerId, Topic};\nuse crate::string::{ShortenDirection, StrShortener};\n\n", "meta": {"hash_id": 
"5658811313267444de24e0bc4280ed6897a88442d59a6d9a9805b10fae348225"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 3, "content": "pub mod color;\npub mod content;\npub mod cursor;\npub mod hint;\npub mod window;\n\nmod bell;\nmod damage;\nmod meter;\n\n/// Label for the forward terminal search bar.\nconst FORWARD_SEARCH_LABEL: &str = \"Search: \";\n\n/// Label for the backward terminal search bar.\nconst BACKWARD_SEARCH_LABEL: &str = \"Backward Search: \";\n\n/// The character used to shorten the visible text like uri preview or search regex.\nconst SHORTENER: char = '…';\n\n/// Color which is used to highlight damaged rects when debugging.\nconst DAMAGE_RECT_COLOR: Rgb = Rgb::new(255, 0, 255);\n\n#[derive(Debug)]\npub enum Error {\n /// Error with window management.\n Window(window::Error),\n\n", "meta": {"hash_id": "e81ad765dd04aa2f33ced2d8e1c335d66fa46e352bbad1d604f622daf6a07275"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 4, "content": " /// Error dealing with fonts.\n Font(crossfont::Error),\n\n /// Error in renderer.\n Render(renderer::Error),\n\n /// Error during context operations.\n Context(glutin::error::Error),\n}\n\nimpl std::error::Error for Error {\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n match self {\n Error::Window(err) => err.source(),\n Error::Font(err) => err.source(),\n Error::Render(err) => err.source(),\n Error::Context(err) => err.source(),\n }\n }\n}\n\n", "meta": {"hash_id": "f9071da0991c1542dcd7921265508b2cbca7f6da227559a28ac1b5703c96a6e9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 5, "content": "impl fmt::Display for Error {\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n match self {\n Error::Window(err) => err.fmt(f),\n Error::Font(err) => err.fmt(f),\n Error::Render(err) => err.fmt(f),\n Error::Context(err) => err.fmt(f),\n }\n }\n}\n\nimpl From for Error {\n fn from(val: window::Error) -> Self {\n Error::Window(val)\n }\n}\n\nimpl From for Error {\n fn from(val: crossfont::Error) -> Self {\n Error::Font(val)\n }\n}\n\nimpl From for Error {\n fn from(val: renderer::Error) -> Self {\n Error::Render(val)\n }\n}\n\nimpl From for Error {\n fn from(val: glutin::error::Error) -> Self {\n Error::Context(val)\n }\n}\n\n", "meta": {"hash_id": "c7e78722d16533c3203bf8a963bf81552ccb100fb4e41c7d835e169d92c4286d"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 6, "content": "/// Terminal size info.\n#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)]\npub struct SizeInfo {\n /// Terminal window width.\n width: T,\n\n /// Terminal window height.\n height: T,\n\n /// Width of individual cell.\n cell_width: T,\n\n /// Height of individual cell.\n cell_height: T,\n\n /// Horizontal window padding.\n padding_x: T,\n\n /// Vertical window padding.\n padding_y: T,\n\n /// Number of lines in the viewport.\n screen_lines: usize,\n\n /// Number of columns in the viewport.\n columns: usize,\n}\n\n", "meta": {"hash_id": "42a9f7ba3845fe29a133013618f15cf26f8cbe47a4a9220416f8edf6af47996c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 7, "content": "impl From> for SizeInfo {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n width: size_info.width as u32,\n height: size_info.height as u32,\n cell_width: size_info.cell_width as u32,\n cell_height: size_info.cell_height as u32,\n padding_x: 
size_info.padding_x as u32,\n padding_y: size_info.padding_y as u32,\n screen_lines: size_info.screen_lines,\n columns: size_info.screen_lines,\n }\n }\n}\n\n", "meta": {"hash_id": "322cbccb60ecbba5bfc46ccdc2a6ecb6e78a7002c0426a282f6371e3946e72bd"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 8, "content": "impl From> for WindowSize {\n fn from(size_info: SizeInfo) -> Self {\n Self {\n num_cols: size_info.columns() as u16,\n num_lines: size_info.screen_lines() as u16,\n cell_width: size_info.cell_width() as u16,\n cell_height: size_info.cell_height() as u16,\n }\n }\n}\n\nimpl SizeInfo {\n #[inline]\n pub fn width(&self) -> T {\n self.width\n }\n\n #[inline]\n pub fn height(&self) -> T {\n self.height\n }\n\n #[inline]\n pub fn cell_width(&self) -> T {\n self.cell_width\n }\n\n #[inline]\n pub fn cell_height(&self) -> T {\n self.cell_height\n }\n\n #[inline]\n pub fn padding_x(&self) -> T {\n self.padding_x\n }\n\n #[inline]\n pub fn padding_y(&self) -> T {\n self.padding_y\n }\n}\n\n", "meta": {"hash_id": "d3da5d4d6a42319a17785d3cb52f22fb2532996de360fb0a70d1a10dc581ef57"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 9, "content": "impl SizeInfo {\n #[allow(clippy::too_many_arguments)]\n pub fn new(\n width: f32,\n height: f32,\n cell_width: f32,\n cell_height: f32,\n mut padding_x: f32,\n mut padding_y: f32,\n dynamic_padding: bool,\n ) -> SizeInfo {\n if dynamic_padding {\n padding_x = Self::dynamic_padding(padding_x.floor(), width, cell_width);\n padding_y = Self::dynamic_padding(padding_y.floor(), height, cell_height);\n }\n\n", "meta": {"hash_id": "fa70b304bccabbf388746322eebdfc35277b964c83b6baedb11d87538395744a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 10, "content": " let lines = (height - 2. * padding_y) / cell_height;\n let screen_lines = cmp::max(lines as usize, MIN_SCREEN_LINES);\n\n let columns = (width - 2. * padding_x) / cell_width;\n let columns = cmp::max(columns as usize, MIN_COLUMNS);\n\n SizeInfo {\n width,\n height,\n cell_width,\n cell_height,\n padding_x: padding_x.floor(),\n padding_y: padding_y.floor(),\n screen_lines,\n columns,\n }\n }\n\n #[inline]\n pub fn reserve_lines(&mut self, count: usize) {\n self.screen_lines = cmp::max(self.screen_lines.saturating_sub(count), MIN_SCREEN_LINES);\n }\n\n", "meta": {"hash_id": "332446484a520ca0002f602db88ae1228f7b7aa8adbf2e83ab0fd1df748d64a3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 11, "content": " /// Check if coordinates are inside the terminal grid.\n ///\n /// The padding, message bar or search are not counted as part of the grid.\n #[inline]\n pub fn contains_point(&self, x: usize, y: usize) -> bool {\n x <= (self.padding_x + self.columns as f32 * self.cell_width) as usize\n && x > self.padding_x as usize\n && y <= (self.padding_y + self.screen_lines as f32 * self.cell_height) as usize\n && y > self.padding_y as usize\n }\n\n", "meta": {"hash_id": "ef5f151414c4d3a796a840d12e29c2593453898ae0a1bf9a3c4402e62494d7b3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 12, "content": " /// Calculate padding to spread it evenly around the terminal content.\n #[inline]\n fn dynamic_padding(padding: f32, dimension: f32, cell_dimension: f32) -> f32 {\n padding + ((dimension - 2. 
* padding) % cell_dimension) / 2.\n }\n}\n\nimpl TermDimensions for SizeInfo {\n #[inline]\n fn columns(&self) -> usize {\n self.columns\n }\n\n #[inline]\n fn screen_lines(&self) -> usize {\n self.screen_lines\n }\n\n #[inline]\n fn total_lines(&self) -> usize {\n self.screen_lines()\n }\n}\n\n", "meta": {"hash_id": "73e5fe0b56a019ff26ad2381ff4b03e23d7765f4c505ed3d25a49d935c055637"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 13, "content": "#[derive(Default, Clone, Debug, PartialEq, Eq)]\npub struct DisplayUpdate {\n pub dirty: bool,\n\n dimensions: Option>,\n cursor_dirty: bool,\n font: Option,\n}\n\nimpl DisplayUpdate {\n pub fn dimensions(&self) -> Option> {\n self.dimensions\n }\n\n pub fn font(&self) -> Option<&Font> {\n self.font.as_ref()\n }\n\n pub fn cursor_dirty(&self) -> bool {\n self.cursor_dirty\n }\n\n pub fn set_dimensions(&mut self, dimensions: PhysicalSize) {\n self.dimensions = Some(dimensions);\n self.dirty = true;\n }\n\n pub fn set_font(&mut self, font: Font) {\n self.font = Some(font);\n self.dirty = true;\n }\n\n pub fn set_cursor_dirty(&mut self) {\n self.cursor_dirty = true;\n self.dirty = true;\n }\n}\n\n", "meta": {"hash_id": "e4f08dc76384cfa77f446aae70d820d82d9adea80d638f8db7324a8f69d73b5d"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 14, "content": "/// The display wraps a window, font rasterizer, and GPU renderer.\npub struct Display {\n pub window: Window,\n\n pub size_info: SizeInfo,\n\n /// Hint highlighted by the mouse.\n pub highlighted_hint: Option,\n\n /// Hint highlighted by the vi mode cursor.\n pub vi_highlighted_hint: Option,\n\n pub raw_window_handle: RawWindowHandle,\n\n /// UI cursor visibility for blinking.\n pub cursor_hidden: bool,\n\n pub visual_bell: VisualBell,\n\n /// Mapped RGB values for each terminal color.\n pub colors: List,\n\n /// State of the keyboard hints.\n pub hint_state: HintState,\n\n /// Unprocessed display updates.\n pub pending_update: DisplayUpdate,\n\n /// The renderer update that takes place only once before the actual rendering.\n pub pending_renderer_update: Option,\n\n", "meta": {"hash_id": "3811d9736c47f355a8ad4b929c285e77eb099350e68420142636952a39f2223e"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 15, "content": " /// The ime on the given display.\n pub ime: Ime,\n\n /// The state of the timer for frame scheduling.\n pub frame_timer: FrameTimer,\n\n /// Damage tracker for the given display.\n pub damage_tracker: DamageTracker,\n\n /// Font size used by the window.\n pub font_size: FontSize,\n\n // Mouse point position when highlighting hints.\n hint_mouse_point: Option,\n\n renderer: ManuallyDrop,\n\n surface: ManuallyDrop>,\n\n context: ManuallyDrop>,\n\n glyph_cache: GlyphCache,\n meter: Meter,\n}\n\nimpl Display {\n pub fn new(\n window: Window,\n gl_context: NotCurrentContext,\n config: &UiConfig,\n _tabbed: bool,\n ) -> Result {\n let raw_window_handle = window.raw_window_handle();\n\n let scale_factor = window.scale_factor as f32;\n let rasterizer = Rasterizer::new()?;\n\n", "meta": {"hash_id": "b15f77642e7ceded8462e22b6ac3f81c2205a35d59841fdc3fe6ad8c4ab9f3ae"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 16, "content": " let font_size = config.font.size().scale(scale_factor);\n debug!(\"Loading \\\"{}\\\" font\", &config.font.normal().family);\n let font = config.font.clone().with_size(font_size);\n let mut 
glyph_cache = GlyphCache::new(rasterizer, &font)?;\n\n let metrics = glyph_cache.font_metrics();\n let (cell_width, cell_height) = compute_cell_size(config, &metrics);\n\n // Resize the window to account for the user configured size.\n if let Some(dimensions) = config.window.dimensions() {\n let size = window_size(config, dimensions, cell_width, cell_height, scale_factor);\n window.request_inner_size(size);\n }\n\n", "meta": {"hash_id": "72b2e89ab1dc3d3689c6c98fec13bb24aadee6692a3837c6580dbc15874b986a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 17, "content": " // Create the GL surface to draw into.\n let surface = renderer::platform::create_gl_surface(\n &gl_context,\n window.inner_size(),\n window.raw_window_handle(),\n )?;\n\n // Make the context current.\n let context = gl_context.make_current(&surface)?;\n\n // Create renderer.\n let mut renderer = Renderer::new(&context, config.debug.renderer)?;\n\n", "meta": {"hash_id": "e835ec5d07329f713115a3c6273e5eb65798fd8f337f30af00c2bd2cca72fde9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 18, "content": " // Load font common glyphs to accelerate rendering.\n debug!(\"Filling glyph cache with common glyphs\");\n renderer.with_loader(|mut api| {\n glyph_cache.reset_glyph_cache(&mut api);\n });\n\n let padding = config.window.padding(window.scale_factor as f32);\n let viewport_size = window.inner_size();\n\n // Create new size with at least one column and row.\n let size_info = SizeInfo::new(\n viewport_size.width as f32,\n viewport_size.height as f32,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding && config.window.dimensions().is_none(),\n );\n\n", "meta": {"hash_id": "91bb3ae5c6b4871b1e845d262a3a43b180f51fb8d6f21ecce16cf8e1c92501f3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 19, "content": " info!(\"Cell size: {} x {}\", cell_width, cell_height);\n info!(\"Padding: {} x {}\", size_info.padding_x(), size_info.padding_y());\n info!(\"Width: {}, Height: {}\", size_info.width(), size_info.height());\n\n // Update OpenGL projection.\n renderer.resize(&size_info);\n\n // Clear screen.\n let background_color = config.colors.primary.background;\n renderer.clear(background_color, config.window_opacity());\n\n // Disable shadows for transparent windows on macOS.\n #[cfg(target_os = \"macos\")]\n window.set_has_shadow(config.window_opacity() >= 1.0);\n\n let is_wayland = matches!(raw_window_handle, RawWindowHandle::Wayland(_));\n\n", "meta": {"hash_id": "7faf9dbeb3a6455f09d242144a94e725f681ff8c08ea0280f3de2b22f4d55f6c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 20, "content": " // On Wayland we can safely ignore this call, since the window isn't visible until you\n // actually draw something into it and commit those changes.\n if !is_wayland {\n surface.swap_buffers(&context).expect(\"failed to swap buffers.\");\n renderer.finish();\n }\n\n // Set resize increments for the newly created window.\n if config.window.resize_increments {\n window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n window.set_visible(true);\n\n", "meta": {"hash_id": "c5c51f4f8a09dcfa7cd5c7d48a5506f82a3044f317efeafd10d19b70c5ac9b7b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 21, "content": " #[allow(clippy::single_match)]\n #[cfg(not(windows))]\n if !_tabbed {\n match 
config.window.startup_mode {\n #[cfg(target_os = \"macos\")]\n StartupMode::SimpleFullscreen => window.set_simple_fullscreen(true),\n StartupMode::Maximized if !is_wayland => window.set_maximized(true),\n _ => (),\n }\n }\n\n let hint_state = HintState::new(config.hints.alphabet());\n\n let mut damage_tracker = DamageTracker::new(size_info.screen_lines(), size_info.columns());\n damage_tracker.debug = config.debug.highlight_damage;\n\n // Disable vsync.\n if let Err(err) = surface.set_swap_interval(&context, SwapInterval::DontWait) {\n info!(\"Failed to disable vsync: {}\", err);\n }\n\n", "meta": {"hash_id": "35aa2a8614b0447dfb154a87dfb78adb124b3317914b16156fed58fd3f314225"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 22, "content": " Ok(Self {\n context: ManuallyDrop::new(Replaceable::new(context)),\n visual_bell: VisualBell::from(&config.bell),\n renderer: ManuallyDrop::new(renderer),\n surface: ManuallyDrop::new(surface),\n colors: List::from(&config.colors),\n frame_timer: FrameTimer::new(),\n raw_window_handle,\n damage_tracker,\n glyph_cache,\n hint_state,\n size_info,\n font_size,\n window,\n pending_renderer_update: Default::default(),\n vi_highlighted_hint: Default::default(),\n highlighted_hint: Default::default(),\n hint_mouse_point: Default::default(),\n pending_update: Default::default(),\n cursor_hidden: Default::default(),\n meter: Default::default(),\n ime: Default::default(),\n })\n }\n\n", "meta": {"hash_id": "e768c80e459a073329b21eb0d348a82d396b8fffaf27b945fdeba4d425f7b5ca"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 23, "content": " #[inline]\n pub fn gl_context(&self) -> &PossiblyCurrentContext {\n self.context.get()\n }\n\n pub fn make_not_current(&mut self) {\n if self.context.get().is_current() {\n self.context.replace_with(|context| {\n context\n .make_not_current()\n .expect(\"failed to disable context\")\n .treat_as_possibly_current()\n });\n }\n }\n\n pub fn make_current(&self) {\n if !self.context.get().is_current() {\n self.context.make_current(&self.surface).expect(\"failed to make context current\")\n }\n }\n\n", "meta": {"hash_id": "aa553a1499d68d6e0956646608fa67bb13518dc62eb65df6a3070b35c474038f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 24, "content": " fn swap_buffers(&self) {\n #[allow(clippy::single_match)]\n let res = match (self.surface.deref(), &self.context.get()) {\n #[cfg(not(any(target_os = \"macos\", windows)))]\n (Surface::Egl(surface), PossiblyCurrentContext::Egl(context))\n if matches!(self.raw_window_handle, RawWindowHandle::Wayland(_))\n && !self.damage_tracker.debug =>\n {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n surface.swap_buffers_with_damage(context, &damage)\n },\n (surface, context) => surface.swap_buffers(context),\n };\n if let Err(err) = res {\n debug!(\"error calling swap_buffers: {}\", err);\n }\n }\n\n", "meta": {"hash_id": "b668652e20a43061c67b22b03f56c114a7dc72bd75a21da776c9356ff2154af0"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 25, "content": " /// Update font size and cell dimensions.\n ///\n /// This will return a tuple of the cell width and height.\n fn update_font_size(\n glyph_cache: &mut GlyphCache,\n config: &UiConfig,\n font: &Font,\n ) -> (f32, f32) {\n let _ = glyph_cache.update_font_size(font);\n\n // Compute new cell sizes.\n compute_cell_size(config, 
&glyph_cache.font_metrics())\n }\n\n /// Reset glyph cache.\n fn reset_glyph_cache(&mut self) {\n let cache = &mut self.glyph_cache;\n self.renderer.with_loader(|mut api| {\n cache.reset_glyph_cache(&mut api);\n });\n }\n\n", "meta": {"hash_id": "fe9e2774ab5dfd564b6579cfb9b1ce85b2015177d221aef32e2d3ae14507cec4"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 26, "content": " // XXX: this function must not call to any `OpenGL` related tasks. Renderer updates are\n // performed in [`Self::process_renderer_update`] right before drawing.\n //\n /// Process update events.\n pub fn handle_update(\n &mut self,\n terminal: &mut Term,\n pty_resize_handle: &mut dyn OnResize,\n message_buffer: &MessageBuffer,\n search_state: &mut SearchState,\n config: &UiConfig,\n ) where\n T: EventListener,\n {\n let pending_update = mem::take(&mut self.pending_update);\n\n", "meta": {"hash_id": "ed1f56505a3374dd2edecc05ad0cee8c469187f9ecc26860492082baefccf9b5"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 27, "content": " let (mut cell_width, mut cell_height) =\n (self.size_info.cell_width(), self.size_info.cell_height());\n\n if pending_update.font().is_some() || pending_update.cursor_dirty() {\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.clear_font_cache = true\n }\n\n // Update font size and cell dimensions.\n if let Some(font) = pending_update.font() {\n let cell_dimensions = Self::update_font_size(&mut self.glyph_cache, config, font);\n cell_width = cell_dimensions.0;\n cell_height = cell_dimensions.1;\n\n", "meta": {"hash_id": "79e8568f712841aa209157ef132823f1306f62017a353e68b6113b714b554136"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 28, "content": " info!(\"Cell size: {} x {}\", cell_width, cell_height);\n\n // Mark entire terminal as damaged since glyph size could change without cell size\n // changes.\n self.damage_tracker.frame().mark_fully_damaged();\n }\n\n let (mut width, mut height) = (self.size_info.width(), self.size_info.height());\n if let Some(dimensions) = pending_update.dimensions() {\n width = dimensions.width as f32;\n height = dimensions.height as f32;\n }\n\n", "meta": {"hash_id": "20a281598c4b5f24da35d80abaf1a660d7a05a7d30f7566041448bf4c6a49ad7"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 29, "content": " let padding = config.window.padding(self.window.scale_factor as f32);\n\n let mut new_size = SizeInfo::new(\n width,\n height,\n cell_width,\n cell_height,\n padding.0,\n padding.1,\n config.window.dynamic_padding,\n );\n\n // Update number of column/lines in the viewport.\n let search_active = search_state.history_index.is_some();\n let message_bar_lines = message_buffer.message().map_or(0, |m| m.text(&new_size).len());\n let search_lines = usize::from(search_active);\n new_size.reserve_lines(message_bar_lines + search_lines);\n\n", "meta": {"hash_id": "72069e14b9121905e307747adf2b90579c0aae90998058ad1320e1f17c892f17"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 30, "content": " // Update resize increments.\n if config.window.resize_increments {\n self.window.set_resize_increments(PhysicalSize::new(cell_width, cell_height));\n }\n\n // Resize when terminal when its dimensions have changed.\n if self.size_info.screen_lines() != new_size.screen_lines\n || self.size_info.columns() != 
new_size.columns()\n {\n // Resize PTY.\n pty_resize_handle.on_resize(new_size.into());\n\n", "meta": {"hash_id": "591f61d27ba4860ebe42874418318ca473425d226193d4c3803e71da91332e9a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 31, "content": " // Resize terminal.\n terminal.resize(new_size);\n\n // Resize damage tracking.\n self.damage_tracker.resize(new_size.screen_lines(), new_size.columns());\n }\n\n // Check if dimensions have changed.\n if new_size != self.size_info {\n // Queue renderer update.\n let renderer_update = self.pending_renderer_update.get_or_insert(Default::default());\n renderer_update.resize = true;\n\n // Clear focused search match.\n search_state.clear_focused_match();\n }\n self.size_info = new_size;\n }\n\n", "meta": {"hash_id": "cbd894d544aaeadcbfea18922999949498c11df57b4722c93f1d487f08ede145"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 32, "content": " // NOTE: Renderer updates are split off, since platforms like Wayland require resize and other\n // OpenGL operations to be performed right before rendering. Otherwise they could lock the\n // back buffer and render with the previous state. This also solves flickering during resizes.\n //\n /// Update the state of the renderer.\n pub fn process_renderer_update(&mut self) {\n let renderer_update = match self.pending_renderer_update.take() {\n Some(renderer_update) => renderer_update,\n _ => return,\n };\n\n", "meta": {"hash_id": "bbf795ca1bbb43287648230c60608c2d4c4a558dfac639cf85cee2a03f842120"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 33, "content": " // Resize renderer.\n if renderer_update.resize {\n let width = NonZeroU32::new(self.size_info.width() as u32).unwrap();\n let height = NonZeroU32::new(self.size_info.height() as u32).unwrap();\n self.surface.resize(&self.context, width, height);\n }\n\n // Ensure we're modifying the correct OpenGL context.\n self.make_current();\n\n if renderer_update.clear_font_cache {\n self.reset_glyph_cache();\n }\n\n self.renderer.resize(&self.size_info);\n\n info!(\"Padding: {} x {}\", self.size_info.padding_x(), self.size_info.padding_y());\n info!(\"Width: {}, Height: {}\", self.size_info.width(), self.size_info.height());\n }\n\n", "meta": {"hash_id": "bae2fda06b24164a992b035f05346cec0db2aae06c91013b8901d87838639100"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 34, "content": " /// Draw the screen.\n ///\n /// A reference to Term whose state is being drawn must be provided.\n ///\n /// This call may block if vsync is enabled.\n pub fn draw(\n &mut self,\n mut terminal: MutexGuard<'_, Term>,\n scheduler: &mut Scheduler,\n message_buffer: &MessageBuffer,\n config: &UiConfig,\n search_state: &mut SearchState,\n ) {\n // Collect renderable content before the terminal is dropped.\n let mut content = RenderableContent::new(config, self, &terminal, search_state);\n", "meta": {"hash_id": "4ee907a039f825726c2412e854fe4bc9a179f4904d347aed1d037b9b9432af2f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 35, "content": " let mut grid_cells = Vec::new();\n for cell in &mut content {\n grid_cells.push(cell);\n }\n let selection_range = content.selection_range();\n let foreground_color = content.color(NamedColor::Foreground as usize);\n let background_color = content.color(NamedColor::Background as usize);\n let display_offset = 
content.display_offset();\n let cursor = content.cursor();\n\n let cursor_point = terminal.grid().cursor.point;\n let total_lines = terminal.grid().total_lines();\n let metrics = self.glyph_cache.font_metrics();\n let size_info = self.size_info;\n\n let vi_mode = terminal.mode().contains(TermMode::VI);\n let vi_cursor_point = if vi_mode { Some(terminal.vi_mode_cursor.point) } else { None };\n\n", "meta": {"hash_id": "fe491ce79ac648bfff38ee67779efea880d2cd4a4839d5ec21b41f0815d5442e"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 36, "content": " // Add damage from the terminal.\n if self.collect_damage() {\n match terminal.damage() {\n TermDamage::Full => self.damage_tracker.frame().mark_fully_damaged(),\n TermDamage::Partial(damaged_lines) => {\n for damage in damaged_lines {\n self.damage_tracker.frame().damage_line(damage);\n }\n },\n }\n terminal.reset_damage();\n }\n\n // Drop terminal as early as possible to free lock.\n drop(terminal);\n\n // Add damage from alacritty's UI elements overlapping terminal.\n if self.collect_damage() {\n let requires_full_damage = self.visual_bell.intensity() != 0.\n || self.hint_state.active()\n || search_state.regex().is_some();\n\n", "meta": {"hash_id": "78edad84eefb6200419f24f43c2eafca931dce7eec6ca54851fe18f91979f03b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 37, "content": " if requires_full_damage {\n self.damage_tracker.frame().mark_fully_damaged();\n self.damage_tracker.next_frame().mark_fully_damaged();\n }\n\n let vi_cursor_viewport_point =\n vi_cursor_point.and_then(|cursor| point_to_viewport(display_offset, cursor));\n\n self.damage_tracker.damage_vi_cursor(vi_cursor_viewport_point);\n self.damage_tracker.damage_selection(selection_range, display_offset);\n }\n\n // Make sure this window's OpenGL context is active.\n self.make_current();\n\n self.renderer.clear(background_color, config.window_opacity());\n let mut lines = RenderLines::new();\n\n // Optimize loop hint comparator.\n let has_highlighted_hint =\n self.highlighted_hint.is_some() || self.vi_highlighted_hint.is_some();\n\n", "meta": {"hash_id": "fbf5a992b119745ae32862906a730ddbedac02888a88bc5edd93d7cfaa3d4982"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 38, "content": " // Draw grid.\n {\n let _sampler = self.meter.sampler();\n\n // Ensure macOS hasn't reset our viewport.\n #[cfg(target_os = \"macos\")]\n self.renderer.set_viewport(&size_info);\n\n let glyph_cache = &mut self.glyph_cache;\n let highlighted_hint = &self.highlighted_hint;\n let vi_highlighted_hint = &self.vi_highlighted_hint;\n let damage_tracker = &mut self.damage_tracker;\n\n self.renderer.draw_cells(\n &size_info,\n glyph_cache,\n grid_cells.into_iter().map(|mut cell| {\n // Underline hints hovered by mouse or vi mode cursor.\n let point = term::viewport_to_point(display_offset, cell.point);\n\n", "meta": {"hash_id": "8c40ac29292bd9425a2fcef0f8c826637888ee41be53e36d9a257d3c3a7c15a3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 39, "content": " if has_highlighted_hint {\n let hyperlink =\n cell.extra.as_ref().and_then(|extra| extra.hyperlink.as_ref());\n if highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n || vi_highlighted_hint\n .as_ref()\n .map_or(false, |hint| hint.should_highlight(point, hyperlink))\n {\n cell.flags.insert(Flags::UNDERLINE);\n // Damage hints for the current 
and next frames.\n damage_tracker.frame().damage_point(cell.point);\n damage_tracker.next_frame().damage_point(cell.point);\n }\n }\n\n", "meta": {"hash_id": "3520acced525739e717b6e0e00879a0189cd42190601b2223baa6ad5555801e4"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 40, "content": " // Update underline/strikeout.\n lines.update(&cell);\n\n cell\n }),\n );\n }\n\n let mut rects = lines.rects(&metrics, &size_info);\n\n if let Some(vi_cursor_point) = vi_cursor_point {\n // Indicate vi mode by showing the cursor's position in the top right corner.\n let line = (-vi_cursor_point.line.0 + size_info.bottommost_line().0) as usize;\n let obstructed_column = Some(vi_cursor_point)\n .filter(|point| point.line == -(display_offset as i32))\n .map(|point| point.column);\n self.draw_line_indicator(config, total_lines, obstructed_column, line);\n } else if search_state.regex().is_some() {\n // Show current display offset in vi-less search to indicate match position.\n self.draw_line_indicator(config, total_lines, None, display_offset);\n };\n\n", "meta": {"hash_id": "298265debd2bb5d01f2becadcd2b913ad31b2ad76776a82a8cdc3f36307c54dd"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 41, "content": " // Draw cursor.\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n\n // Push visual bell after url/underline/strikeout rects.\n let visual_bell_intensity = self.visual_bell.intensity();\n if visual_bell_intensity != 0. {\n let visual_bell_rect = RenderRect::new(\n 0.,\n 0.,\n size_info.width(),\n size_info.height(),\n config.bell.color,\n visual_bell_intensity as f32,\n );\n rects.push(visual_bell_rect);\n }\n\n", "meta": {"hash_id": "18599d3454e142675535df415c5701663d07bffa7f8411f237c35a3d22c94e8f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 42, "content": " // Handle IME positioning and search bar rendering.\n let ime_position = match search_state.regex() {\n Some(regex) => {\n let search_label = match search_state.direction() {\n Direction::Right => FORWARD_SEARCH_LABEL,\n Direction::Left => BACKWARD_SEARCH_LABEL,\n };\n\n let search_text = Self::format_search(regex, search_label, size_info.columns());\n\n // Render the search bar.\n self.draw_search(config, &search_text);\n\n // Draw search bar cursor.\n let line = size_info.screen_lines();\n let column = Column(search_text.chars().count() - 1);\n\n", "meta": {"hash_id": "c20bfcb216ac981870022b5ffddbfa9b53bc1cf353566e9a690e17e3f4d6f544"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 43, "content": " // Add cursor to search bar if IME is not active.\n if self.ime.preedit().is_none() {\n let fg = config.colors.footer_bar_foreground();\n let shape = CursorShape::Underline;\n let cursor = RenderableCursor::new(Point::new(line, column), shape, fg, false);\n rects.extend(cursor.rects(&size_info, config.cursor.thickness()));\n }\n\n Some(Point::new(line, column))\n },\n None => {\n let num_lines = self.size_info.screen_lines();\n term::point_to_viewport(display_offset, cursor_point)\n .filter(|point| point.line < num_lines)\n },\n };\n\n", "meta": {"hash_id": "350ea9d2cd9bdced937484a43d5eac3fbedf6ed7063a0928056225ec2ca7058e"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 44, "content": " // Handle IME.\n if self.ime.is_enabled() {\n if let Some(point) = ime_position {\n let (fg, bg) = if 
search_state.regex().is_some() {\n (config.colors.footer_bar_foreground(), config.colors.footer_bar_background())\n } else {\n (foreground_color, background_color)\n };\n\n self.draw_ime_preview(point, fg, bg, &mut rects, config);\n }\n }\n\n if let Some(message) = message_buffer.message() {\n let search_offset = usize::from(search_state.regex().is_some());\n let text = message.text(&size_info);\n\n", "meta": {"hash_id": "5b742665b72ab3ff0a08013a146530fe6e6830cef61bce1a7f79aebf1eb67555"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 45, "content": " // Create a new rectangle for the background.\n let start_line = size_info.screen_lines() + search_offset;\n let y = size_info.cell_height().mul_add(start_line as f32, size_info.padding_y());\n\n let bg = match message.ty() {\n MessageType::Error => config.colors.normal.red,\n MessageType::Warning => config.colors.normal.yellow,\n };\n\n", "meta": {"hash_id": "a0a75ee6d8209be0e7e86de929d23cb6b231aa453c4bceb95b062a5bdbd0d51b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 46, "content": " let x = 0;\n let width = size_info.width() as i32;\n let height = (size_info.height() - y) as i32;\n let message_bar_rect =\n RenderRect::new(x as f32, y, width as f32, height as f32, bg, 1.);\n\n // Push message_bar in the end, so it'll be above all other content.\n rects.push(message_bar_rect);\n\n // Always damage message bar, since it could have messages of the same size in it.\n self.damage_tracker.frame().add_viewport_rect(&size_info, x, y as i32, width, height);\n\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n\n", "meta": {"hash_id": "7ff46d8ef5953994171ae2588af562115c117c5a10cdaccffbff5610b61c30d9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 47, "content": " // Relay messages to the user.\n let glyph_cache = &mut self.glyph_cache;\n let fg = config.colors.primary.background;\n for (i, message_text) in text.iter().enumerate() {\n let point = Point::new(start_line + i, Column(0));\n self.renderer.draw_string(\n point,\n fg,\n bg,\n message_text.chars(),\n &size_info,\n glyph_cache,\n );\n }\n } else {\n // Draw rectangles.\n self.renderer.draw_rects(&size_info, &metrics, rects);\n }\n\n", "meta": {"hash_id": "4301e36e0494c3864c04e5e18d5714c4a38bacf6c448ef0f4b761c5a76781744"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 48, "content": " self.draw_render_timer(config);\n\n // Draw hyperlink uri preview.\n if has_highlighted_hint {\n let cursor_point = vi_cursor_point.or(Some(cursor_point));\n self.draw_hyperlink_preview(config, cursor_point, display_offset);\n }\n\n // Notify winit that we're about to present.\n self.window.pre_present_notify();\n\n // Highlight damage for debugging.\n if self.damage_tracker.debug {\n let damage = self.damage_tracker.shape_frame_damage(self.size_info.into());\n let mut rects = Vec::with_capacity(damage.len());\n self.highlight_damage(&mut rects);\n self.renderer.draw_rects(&self.size_info, &metrics, rects);\n }\n\n // Clearing debug highlights from the previous frame requires full redraw.\n self.swap_buffers();\n\n", "meta": {"hash_id": "634ea3f07419e628e451619fac284b5833898edc6e6206e6477f6b9397a2e163"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 49, "content": " if matches!(self.raw_window_handle, RawWindowHandle::Xcb(_) | RawWindowHandle::Xlib(_)) {\n // On 
X11 `swap_buffers` does not block for vsync. However the next OpenGl command\n // will block to synchronize (this is `glClear` in Alacritty), which causes a\n // permanent one frame delay.\n self.renderer.finish();\n }\n\n // XXX: Request the new frame after swapping buffers, so the\n // time to finish OpenGL operations is accounted for in the timeout.\n if !matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) {\n self.request_frame(scheduler);\n }\n\n self.damage_tracker.swap_damage();\n }\n\n /// Update to a new configuration.\n pub fn update_config(&mut self, config: &UiConfig) {\n self.damage_tracker.debug = config.debug.highlight_damage;\n self.visual_bell.update_config(&config.bell);\n self.colors = List::from(&config.colors);\n }\n\n", "meta": {"hash_id": "8193ff2c96e4891d02efb66bc8a53c84ac112a628fd77195c003536960b3267c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 50, "content": " /// Update the mouse/vi mode cursor hint highlighting.\n ///\n /// This will return whether the highlighted hints changed.\n pub fn update_highlighted_hints(\n &mut self,\n term: &Term,\n config: &UiConfig,\n mouse: &Mouse,\n modifiers: ModifiersState,\n ) -> bool {\n // Update vi mode cursor hint.\n let vi_highlighted_hint = if term.mode().contains(TermMode::VI) {\n let mods = ModifiersState::all();\n let point = term.vi_mode_cursor.point;\n hint::highlighted_at(term, config, point, mods)\n } else {\n None\n };\n let mut dirty = vi_highlighted_hint != self.vi_highlighted_hint;\n self.vi_highlighted_hint = vi_highlighted_hint;\n\n", "meta": {"hash_id": "ab85133046538b326b5da625910dbea49fdcfb5196b1302be9d7b38edfd2ecb1"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 51, "content": " // Abort if mouse highlighting conditions are not met.\n if !mouse.inside_text_area || !term.selection.as_ref().map_or(true, Selection::is_empty) {\n dirty |= self.highlighted_hint.is_some();\n self.highlighted_hint = None;\n return dirty;\n }\n\n // Find highlighted hint at mouse position.\n let point = mouse.point(&self.size_info, term.grid().display_offset());\n let highlighted_hint = hint::highlighted_at(term, config, point, modifiers);\n\n", "meta": {"hash_id": "c4e406929070cc79996d7fb380bd3196d1e17ec94dabeb5bf0058a1b83c8c4ca"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 52, "content": " // Update cursor shape.\n if highlighted_hint.is_some() {\n // If mouse changed the line, we should update the hyperlink preview, since the\n // highlighted hint could be disrupted by the old preview.\n dirty = self.hint_mouse_point.map_or(false, |p| p.line != point.line);\n self.hint_mouse_point = Some(point);\n self.window.set_mouse_cursor(CursorIcon::Pointer);\n } else if self.highlighted_hint.is_some() {\n self.hint_mouse_point = None;\n if term.mode().intersects(TermMode::MOUSE_MODE) && !term.mode().contains(TermMode::VI) {\n self.window.set_mouse_cursor(CursorIcon::Default);\n } else {\n self.window.set_mouse_cursor(CursorIcon::Text);\n }\n }\n\n", "meta": {"hash_id": "7a604714c947c7e581d69f25990ca030860f8076664f95c15614176b02f14f31"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 53, "content": " dirty |= self.highlighted_hint != highlighted_hint;\n self.highlighted_hint = highlighted_hint;\n\n dirty\n }\n\n #[inline(never)]\n fn draw_ime_preview(\n &mut self,\n point: Point,\n fg: Rgb,\n bg: Rgb,\n rects: &mut Vec,\n config: &UiConfig,\n ) 
{\n let preedit = match self.ime.preedit() {\n Some(preedit) => preedit,\n None => {\n // In case we don't have preedit, just set the popup point.\n self.window.update_ime_position(point, &self.size_info);\n return;\n },\n };\n\n let num_cols = self.size_info.columns();\n\n", "meta": {"hash_id": "36438e7aceac12b36ca1853743dbd414cc5a4eb8e6aa275d9cff045b7e2d4c88"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 54, "content": " // Get the visible preedit.\n let visible_text: String = match (preedit.cursor_byte_offset, preedit.cursor_end_offset) {\n (Some(byte_offset), Some(end_offset)) if end_offset > num_cols => StrShortener::new(\n &preedit.text[byte_offset..],\n num_cols,\n ShortenDirection::Right,\n Some(SHORTENER),\n ),\n _ => {\n StrShortener::new(&preedit.text, num_cols, ShortenDirection::Left, Some(SHORTENER))\n },\n }\n .collect();\n\n", "meta": {"hash_id": "02e3f7e39d36be809a0819dc9c541961971649fb2aa331c7c50fa4ec5e119978"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 55, "content": " let visible_len = visible_text.chars().count();\n\n let end = cmp::min(point.column.0 + visible_len, num_cols);\n let start = end.saturating_sub(visible_len);\n\n let start = Point::new(point.line, Column(start));\n let end = Point::new(point.line, Column(end - 1));\n\n let glyph_cache = &mut self.glyph_cache;\n let metrics = glyph_cache.font_metrics();\n\n self.renderer.draw_string(\n start,\n fg,\n bg,\n visible_text.chars(),\n &self.size_info,\n glyph_cache,\n );\n\n // Damage preedit inside the terminal viewport.\n if self.collect_damage() && point.line < self.size_info.screen_lines() {\n let damage = LineDamageBounds::new(start.line, 0, num_cols);\n self.damage_tracker.frame().damage_line(damage);\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n", "meta": {"hash_id": "f4465b191a88d6c08068b0e33f39d884f63a67cc6829e896f24ecc1b96158940"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 56, "content": " // Add underline for preedit text.\n let underline = RenderLine { start, end, color: fg };\n rects.extend(underline.rects(Flags::UNDERLINE, &metrics, &self.size_info));\n\n let ime_popup_point = match preedit.cursor_end_offset {\n Some(cursor_end_offset) if cursor_end_offset != 0 => {\n let is_wide = preedit.text[preedit.cursor_byte_offset.unwrap_or_default()..]\n .chars()\n .next()\n .map(|ch| ch.width() == Some(2))\n .unwrap_or_default();\n\n", "meta": {"hash_id": "6a6a63878025944ce98f665e2365a98949fe34df15776aa07ce28cc0056d96e3"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 57, "content": " let cursor_column = Column(\n (end.column.0 as isize - cursor_end_offset as isize + 1).max(0) as usize,\n );\n let cursor_point = Point::new(point.line, cursor_column);\n let cursor =\n RenderableCursor::new(cursor_point, CursorShape::HollowBlock, fg, is_wide);\n rects.extend(cursor.rects(&self.size_info, config.cursor.thickness()));\n cursor_point\n },\n _ => end,\n };\n\n", "meta": {"hash_id": "0999c2483d874c5f24ec5bbd6d71bd76a775140cbd0ab32eebb7e16a14576bbb"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 58, "content": " self.window.update_ime_position(ime_popup_point, &self.size_info);\n }\n\n /// Format search regex to account for the cursor and fullwidth characters.\n fn format_search(search_regex: &str, search_label: &str, max_width: usize) -> String {\n let label_len = 
search_label.len();\n\n // Skip `search_regex` formatting if only label is visible.\n if label_len > max_width {\n return search_label[..max_width].to_owned();\n }\n\n", "meta": {"hash_id": "3b5e31b08e83473461ab3d08a6586fcc45fe7f018892e0be697974a32cf99556"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 59, "content": " // The search string consists of `search_label` + `search_regex` + `cursor`.\n let mut bar_text = String::from(search_label);\n bar_text.extend(StrShortener::new(\n search_regex,\n max_width.wrapping_sub(label_len + 1),\n ShortenDirection::Left,\n Some(SHORTENER),\n ));\n\n // Add place for cursor.\n bar_text.push(' ');\n\n bar_text\n }\n\n", "meta": {"hash_id": "778d02069dba2c206695f9644f6da84d4059373e565c3a0a3582cb79e27fff53"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 60, "content": " /// Draw preview for the currently highlighted `Hyperlink`.\n #[inline(never)]\n fn draw_hyperlink_preview(\n &mut self,\n config: &UiConfig,\n cursor_point: Option,\n display_offset: usize,\n ) {\n let num_cols = self.size_info.columns();\n let uris: Vec<_> = self\n .highlighted_hint\n .iter()\n .chain(&self.vi_highlighted_hint)\n .filter_map(|hint| hint.hyperlink().map(|hyperlink| hyperlink.uri()))\n .map(|uri| StrShortener::new(uri, num_cols, ShortenDirection::Right, Some(SHORTENER)))\n .collect();\n\n", "meta": {"hash_id": "0b58c59db2b1bd7885858bc980e47aae6f8e8bb2087ed85e4fde922bfca11125"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 61, "content": " if uris.is_empty() {\n return;\n }\n\n // The maximum amount of protected lines including the ones we'll show preview on.\n let max_protected_lines = uris.len() * 2;\n\n // Lines we shouldn't show preview on, because it'll obscure the highlighted hint.\n let mut protected_lines = Vec::with_capacity(max_protected_lines);\n if self.size_info.screen_lines() > max_protected_lines {\n // Prefer to show preview even when it'll likely obscure the highlighted hint, when\n // there's no place left for it.\n protected_lines.push(self.hint_mouse_point.map(|point| point.line));\n protected_lines.push(cursor_point.map(|point| point.line));\n }\n\n", "meta": {"hash_id": "783ec3774b49fcca910661e627316fe5e9efbeffe175a624958653998950f878"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 62, "content": " // Find the line in viewport we can draw preview on without obscuring protected lines.\n let viewport_bottom = self.size_info.bottommost_line() - Line(display_offset as i32);\n let viewport_top = viewport_bottom - (self.size_info.screen_lines() - 1);\n let uri_lines = (viewport_top.0..=viewport_bottom.0)\n .rev()\n .map(|line| Some(Line(line)))\n .filter_map(|line| {\n if protected_lines.contains(&line) {\n None\n } else {\n protected_lines.push(line);\n line\n }\n })\n .take(uris.len())\n .flat_map(|line| term::point_to_viewport(display_offset, Point::new(line, Column(0))));\n\n", "meta": {"hash_id": "f16942c0759faa75a9115d4645762a3ad51c204eae0f9765e0f0c1e2d6b66385"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 63, "content": " let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n for (uri, point) in uris.into_iter().zip(uri_lines) {\n // Damage the uri preview.\n if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, num_cols);\n 
self.damage_tracker.frame().damage_line(damage);\n\n // Damage the uri preview for the next frame as well.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n self.renderer.draw_string(point, fg, bg, uri, &self.size_info, &mut self.glyph_cache);\n }\n }\n\n", "meta": {"hash_id": "e310b116e108b9ec16c1b9f09d5e70f193b7b368a4b94c97682448e4fbb308c6"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 64, "content": " /// Draw current search regex.\n #[inline(never)]\n fn draw_search(&mut self, config: &UiConfig, text: &str) {\n // Assure text length is at least num_cols.\n let num_cols = self.size_info.columns();\n let text = format!(\"{:<1$}\", text, num_cols);\n\n let point = Point::new(self.size_info.screen_lines(), Column(0));\n\n let fg = config.colors.footer_bar_foreground();\n let bg = config.colors.footer_bar_background();\n\n", "meta": {"hash_id": "94c9b5e79fb5bd80da00b4ecbe94fd9020f9e4110ea6656f13003da0f41cda9a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 65, "content": " self.renderer.draw_string(\n point,\n fg,\n bg,\n text.chars(),\n &self.size_info,\n &mut self.glyph_cache,\n );\n }\n\n /// Draw render timer.\n #[inline(never)]\n fn draw_render_timer(&mut self, config: &UiConfig) {\n if !config.debug.render_timer {\n return;\n }\n\n let timing = format!(\"{:.3} usec\", self.meter.average());\n let point = Point::new(self.size_info.screen_lines().saturating_sub(2), Column(0));\n let fg = config.colors.primary.background;\n let bg = config.colors.normal.red;\n\n", "meta": {"hash_id": "704ee1cc058f89cf5781311565df241f3affc1a6a6b3665420dbf77bb5cad1af"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 66, "content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, timing.len());\n self.damage_tracker.frame().damage_line(damage);\n // Damage the render timer for the next frame.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, timing.chars(), &self.size_info, glyph_cache);\n }\n\n", "meta": {"hash_id": "490dbac953cf0c174cdebed878c4b2813294da5220a60dbca8b8f17160fdb1ad"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 67, "content": " /// Draw an indicator for the position of a line in history.\n #[inline(never)]\n fn draw_line_indicator(\n &mut self,\n config: &UiConfig,\n total_lines: usize,\n obstructed_column: Option,\n line: usize,\n ) {\n let columns = self.size_info.columns();\n let text = format!(\"[{}/{}]\", line, total_lines - 1);\n let column = Column(self.size_info.columns().saturating_sub(text.len()));\n let point = Point::new(0, column);\n\n", "meta": {"hash_id": "3808ecaf15da81a70e597a07f5c5da5ba4c17b1cf6f0dcfa2adf12f3bdbb475d"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 68, "content": " if self.collect_damage() {\n let damage = LineDamageBounds::new(point.line, point.column.0, columns - 1);\n self.damage_tracker.frame().damage_line(damage);\n // Damage it on the next frame in case it goes away.\n self.damage_tracker.next_frame().damage_line(damage);\n }\n\n let colors = &config.colors;\n let fg = colors.line_indicator.foreground.unwrap_or(colors.primary.background);\n let bg = colors.line_indicator.background.unwrap_or(colors.primary.foreground);\n\n", "meta": {"hash_id": 
"5c1d0985bc1d7908fb5c317620576ac17fb3493eabd4d7d8fa3fefd35ab7ce9b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 69, "content": " // Do not render anything if it would obscure the vi mode cursor.\n if obstructed_column.map_or(true, |obstructed_column| obstructed_column < column) {\n let glyph_cache = &mut self.glyph_cache;\n self.renderer.draw_string(point, fg, bg, text.chars(), &self.size_info, glyph_cache);\n }\n }\n\n /// Returns `true` if damage information should be collected, `false` otherwise.\n #[inline]\n fn collect_damage(&self) -> bool {\n matches!(self.raw_window_handle, RawWindowHandle::Wayland(_)) || self.damage_tracker.debug\n }\n\n", "meta": {"hash_id": "2962a6868ce13297567593afea28482302fc693fe7e9f861f1a396aac68e724b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 70, "content": " /// Highlight damaged rects.\n ///\n /// This function is for debug purposes only.\n fn highlight_damage(&self, render_rects: &mut Vec) {\n for damage_rect in &self.damage_tracker.shape_frame_damage(self.size_info.into()) {\n let x = damage_rect.x as f32;\n let height = damage_rect.height as f32;\n let width = damage_rect.width as f32;\n let y = damage_y_to_viewport_y(&self.size_info, damage_rect) as f32;\n let render_rect = RenderRect::new(x, y, width, height, DAMAGE_RECT_COLOR, 0.5);\n\n", "meta": {"hash_id": "0f29cbbaec96d2dd0f1633b9ce7032e7f484e9c69d2639e8edd402f087889cbf"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 71, "content": " render_rects.push(render_rect);\n }\n }\n\n /// Request a new frame for a window on Wayland.\n fn request_frame(&mut self, scheduler: &mut Scheduler) {\n // Mark that we've used a frame.\n self.window.has_frame = false;\n\n // Get the display vblank interval.\n let monitor_vblank_interval = 1_000_000.\n / self\n .window\n .current_monitor()\n .and_then(|monitor| monitor.refresh_rate_millihertz())\n .unwrap_or(60_000) as f64;\n\n", "meta": {"hash_id": "cb8e18791a8d3f8818d32f820bb15fb163aa24833619cb3ad94e4d6e93bedbed"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 72, "content": " // Now convert it to micro seconds.\n let monitor_vblank_interval =\n Duration::from_micros((1000. 
* monitor_vblank_interval) as u64);\n\n let swap_timeout = self.frame_timer.compute_timeout(monitor_vblank_interval);\n\n let window_id = self.window.id();\n let timer_id = TimerId::new(Topic::Frame, window_id);\n let event = Event::new(EventType::Frame, window_id);\n\n scheduler.schedule(event, swap_timeout, false, timer_id);\n }\n}\n\n", "meta": {"hash_id": "30e322a3414ba5e54e2599830958a3907fe0b065035a8c9cb418bf4c1d72efcd"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 73, "content": "impl Drop for Display {\n fn drop(&mut self) {\n // Switch OpenGL context before dropping, otherwise objects (like programs) from other\n // contexts might be deleted when dropping renderer.\n self.make_current();\n unsafe {\n ManuallyDrop::drop(&mut self.renderer);\n ManuallyDrop::drop(&mut self.context);\n ManuallyDrop::drop(&mut self.surface);\n }\n }\n}\n\n", "meta": {"hash_id": "bddb13186a6094969454c24a3f01f270d1b6b0e11ab07064f85194819b7209c9"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 74, "content": "/// Input method state.\n#[derive(Debug, Default)]\npub struct Ime {\n /// Whether the IME is enabled.\n enabled: bool,\n\n /// Current IME preedit.\n preedit: Option<Preedit>,\n}\n\nimpl Ime {\n #[inline]\n pub fn set_enabled(&mut self, is_enabled: bool) {\n if is_enabled {\n self.enabled = is_enabled\n } else {\n // Clear state when disabling IME.\n *self = Default::default();\n }\n }\n\n #[inline]\n pub fn is_enabled(&self) -> bool {\n self.enabled\n }\n\n #[inline]\n pub fn set_preedit(&mut self, preedit: Option<Preedit>) {\n self.preedit = preedit;\n }\n\n #[inline]\n pub fn preedit(&self) -> Option<&Preedit> {\n self.preedit.as_ref()\n }\n}\n\n#[derive(Debug, Default, PartialEq, Eq)]\npub struct Preedit {\n /// The preedit text.\n text: String,\n\n", "meta": {"hash_id": "3f9b2205ea82cbcbaa666a59e8888ccf1ec69f058ede785adacc2f44e5761c1a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 75, "content": " /// Byte offset for cursor start into the preedit text.\n ///\n /// `None` means that the cursor is invisible.\n cursor_byte_offset: Option<usize>,\n\n /// The cursor offset from the end of the preedit in char width.\n cursor_end_offset: Option<usize>,\n}\n\nimpl Preedit {\n pub fn new(text: String, cursor_byte_offset: Option<usize>) -> Self {\n let cursor_end_offset = if let Some(byte_offset) = cursor_byte_offset {\n // Convert byte offset into char offset.\n let cursor_end_offset =\n text[byte_offset..].chars().fold(0, |acc, ch| acc + ch.width().unwrap_or(1));\n\n", "meta": {"hash_id": "c1bdc732d49c892d75f5fd3a6d0ab64d0f23a87b1bf485ad1c252bd881d09ba2"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 76, "content": " Some(cursor_end_offset)\n } else {\n None\n };\n\n Self { text, cursor_byte_offset, cursor_end_offset }\n }\n}\n\n/// Pending renderer updates.\n///\n/// All renderer updates are cached to be applied just before rendering, to avoid platform-specific\n/// rendering issues.\n#[derive(Debug, Default, Copy, Clone)]\npub struct RendererUpdate {\n /// Should resize the window.\n resize: bool,\n\n /// Clear font caches.\n clear_font_cache: bool,\n}\n\n/// Struct for safe in-place replacement.\n///\n/// This struct allows easily replacing struct fields that provide `self -> Self` methods in-place,\n/// without having to deal with constantly unwrapping the underlying [`Option`].\nstruct Replaceable<T>(Option<T>);\n\n", "meta": {"hash_id": 
"91b7c3746a5eb87290de64f6be7afc6266d4e43ccc5cbec4eb0b4dfddbb6132f"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 77, "content": "impl Replaceable {\n pub fn new(inner: T) -> Self {\n Self(Some(inner))\n }\n\n /// Replace the contents of the container.\n pub fn replace_with T>(&mut self, f: F) {\n self.0 = self.0.take().map(f);\n }\n\n /// Get immutable access to the wrapped value.\n pub fn get(&self) -> &T {\n self.0.as_ref().unwrap()\n }\n\n /// Get mutable access to the wrapped value.\n pub fn get_mut(&mut self) -> &mut T {\n self.0.as_mut().unwrap()\n }\n}\n\n", "meta": {"hash_id": "d3d4d8f8720963a367d8f5bf74244739d252412ce356f463bbe3ed09eb00f2de"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 78, "content": "impl Deref for Replaceable {\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n self.get()\n }\n}\n\nimpl DerefMut for Replaceable {\n fn deref_mut(&mut self) -> &mut Self::Target {\n self.get_mut()\n }\n}\n\n/// The frame timer state.\npub struct FrameTimer {\n /// Base timestamp used to compute sync points.\n base: Instant,\n\n /// The last timestamp we synced to.\n last_synced_timestamp: Instant,\n\n /// The refresh rate we've used to compute sync timestamps.\n refresh_interval: Duration,\n}\n\nimpl FrameTimer {\n pub fn new() -> Self {\n let now = Instant::now();\n Self { base: now, last_synced_timestamp: now, refresh_interval: Duration::ZERO }\n }\n\n", "meta": {"hash_id": "26064c14ae007ff16e77137660718d45e30767b2eaea74e0ab974f76a635a2b0"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 79, "content": " /// Compute the delay that we should use to achieve the target frame\n /// rate.\n pub fn compute_timeout(&mut self, refresh_interval: Duration) -> Duration {\n let now = Instant::now();\n\n // Handle refresh rate change.\n if self.refresh_interval != refresh_interval {\n self.base = now;\n self.last_synced_timestamp = now;\n self.refresh_interval = refresh_interval;\n return refresh_interval;\n }\n\n let next_frame = self.last_synced_timestamp + self.refresh_interval;\n\n", "meta": {"hash_id": "51cde94b51707ea3131697086f758f952d2245228eb81581b37f916f06f5824c"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 80, "content": " if next_frame < now {\n // Redraw immediately if we haven't drawn in over `refresh_interval` microseconds.\n let elapsed_micros = (now - self.base).as_micros() as u64;\n let refresh_micros = self.refresh_interval.as_micros() as u64;\n self.last_synced_timestamp =\n now - Duration::from_micros(elapsed_micros % refresh_micros);\n Duration::ZERO\n } else {\n // Redraw on the next `refresh_interval` clock tick.\n self.last_synced_timestamp = next_frame;\n next_frame - now\n }\n }\n}\n\n", "meta": {"hash_id": "3b7b39e20e3e2536fde3cc1b209c02f9909c9eeedf7fde9dfdad330f1345359b"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 81, "content": "/// Calculate the cell dimensions based on font metrics.\n///\n/// This will return a tuple of the cell width and height.\n#[inline]\nfn compute_cell_size(config: &UiConfig, metrics: &crossfont::Metrics) -> (f32, f32) {\n let offset_x = f64::from(config.font.offset.x);\n let offset_y = f64::from(config.font.offset.y);\n (\n (metrics.average_advance + offset_x).floor().max(1.) as f32,\n (metrics.line_height + offset_y).floor().max(1.) 
as f32,\n )\n}\n\n", "meta": {"hash_id": "e60e12c31ebbe6a66ef26ed43c604bb403db5158fc9be3f62f599f20819fe982"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 82, "content": "/// Calculate the size of the window given padding, terminal dimensions and cell size.\nfn window_size(\n config: &UiConfig,\n dimensions: Dimensions,\n cell_width: f32,\n cell_height: f32,\n scale_factor: f32,\n) -> PhysicalSize<u32> {\n let padding = config.window.padding(scale_factor);\n\n let grid_width = cell_width * dimensions.columns.max(MIN_COLUMNS) as f32;\n let grid_height = cell_height * dimensions.lines.max(MIN_SCREEN_LINES) as f32;\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n PhysicalSize::new(width as u32, height as u32)\n}\n", "meta": {"hash_id": "e522ae0b3b63eb29c51c18a5828f1c0bd1bc3facf9bd57c05e8128ab5abd7a1c"}}]}], "golden_chunks": [{"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 77, "content": "impl<T> Replaceable<T> {\n pub fn new(inner: T) -> Self {\n Self(Some(inner))\n }\n\n /// Replace the contents of the container.\n pub fn replace_with<F: FnMut(T) -> T>(&mut self, f: F) {\n self.0 = self.0.take().map(f);\n }\n\n /// Get immutable access to the wrapped value.\n pub fn get(&self) -> &T {\n self.0.as_ref().unwrap()\n }\n\n /// Get mutable access to the wrapped value.\n pub fn get_mut(&mut self) -> &mut T {\n self.0.as_mut().unwrap()\n }\n}\n\n", "meta": {"hash_id": "d3d4d8f8720963a367d8f5bf74244739d252412ce356f463bbe3ed09eb00f2de"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 72, "content": " // Now convert it to micro seconds.\n let monitor_vblank_interval =\n Duration::from_micros((1000. * monitor_vblank_interval) as u64);\n\n let swap_timeout = self.frame_timer.compute_timeout(monitor_vblank_interval);\n\n let window_id = self.window.id();\n let timer_id = TimerId::new(Topic::Frame, window_id);\n let event = Event::new(EventType::Frame, window_id);\n\n scheduler.schedule(event, swap_timeout, false, timer_id);\n }\n}\n\n", "meta": {"hash_id": "30e322a3414ba5e54e2599830958a3907fe0b065035a8c9cb418bf4c1d72efcd"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 51, "content": " // Abort if mouse highlighting conditions are not met.\n if !mouse.inside_text_area || !term.selection.as_ref().map_or(true, Selection::is_empty) {\n dirty |= self.highlighted_hint.is_some();\n self.highlighted_hint = None;\n return dirty;\n }\n\n // Find highlighted hint at mouse position.\n let point = mouse.point(&self.size_info, term.grid().display_offset());\n let highlighted_hint = hint::highlighted_at(term, config, point, modifiers);\n\n", "meta": {"hash_id": "c4e406929070cc79996d7fb380bd3196d1e17ec94dabeb5bf0058a1b83c8c4ca"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 26, "content": " // XXX: this function must not call to any `OpenGL` related tasks. 
Renderer updates are\n // performed in [`Self::process_renderer_update`] right before drawing.\n //\n /// Process update events.\n pub fn handle_update(\n &mut self,\n terminal: &mut Term<T>,\n pty_resize_handle: &mut dyn OnResize,\n message_buffer: &MessageBuffer,\n search_state: &mut SearchState,\n config: &UiConfig,\n ) where\n T: EventListener,\n {\n let pending_update = mem::take(&mut self.pending_update);\n\n", "meta": {"hash_id": "ed1f56505a3374dd2edecc05ad0cee8c469187f9ecc26860492082baefccf9b5"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 16, "content": " let font_size = config.font.size().scale(scale_factor);\n debug!(\"Loading \\\"{}\\\" font\", &config.font.normal().family);\n let font = config.font.clone().with_size(font_size);\n let mut glyph_cache = GlyphCache::new(rasterizer, &font)?;\n\n let metrics = glyph_cache.font_metrics();\n let (cell_width, cell_height) = compute_cell_size(config, &metrics);\n\n // Resize the window to account for the user configured size.\n if let Some(dimensions) = config.window.dimensions() {\n let size = window_size(config, dimensions, cell_width, cell_height, scale_factor);\n window.request_inner_size(size);\n }\n\n", "meta": {"hash_id": "72b2e89ab1dc3d3689c6c98fec13bb24aadee6692a3837c6580dbc15874b986a"}}, {"doc_uuid": "96be8bd624e32a74578a45205b0da1cf48669382263d771180360d5a4f40e60b", "index": 15, "content": " /// The ime on the given display.\n pub ime: Ime,\n\n /// The state of the timer for frame scheduling.\n pub frame_timer: FrameTimer,\n\n /// Damage tracker for the given display.\n pub damage_tracker: DamageTracker,\n\n /// Font size used by the window.\n pub font_size: FontSize,\n\n // Mouse point position when highlighting hints.\n hint_mouse_point: Option<Point>,\n\n renderer: ManuallyDrop<Renderer>,\n\n surface: ManuallyDrop<Surface<WindowSurface>>,\n\n context: ManuallyDrop<Replaceable<PossiblyCurrentContext>>,\n\n glyph_cache: GlyphCache,\n meter: Meter,\n}\n\nimpl Display {\n pub fn new(\n window: Window,\n gl_context: NotCurrentContext,\n config: &UiConfig,\n _tabbed: bool,\n ) -> Result<Display, Error> {\n let raw_window_handle = window.raw_window_handle();\n\n let scale_factor = window.scale_factor as f32;\n let rasterizer = Rasterizer::new()?;\n\n", "meta": {"hash_id": "b15f77642e7ceded8462e22b6ac3f81c2205a35d59841fdc3fe6ad8c4ab9f3ae"}}], "meta": {"doctype": "codebase", "relative_path": "/alacritty/src/display/mod.rs", "repo_name": "alacritty/alacritty", "num_chunks": 83, "source": "codebases/jsonl"}}
+{"query": "What class does HasWeightCol extend?", "answer": "HasWeightCol extends the WithParams interface, as seen in the declaration `public interface HasWeightCol<T> extends WithParams<T>`.", "golden_doc_uuids": ["9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65"], "golden_chunk_uuids": [["9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", 1]], "golden_documents": [{"uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-servable-lib/src/main/java/org/apache/flink/ml/common/param/HasWeightCol.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "7e756bd99ae845cea7322d2982674ad2569ee7269319db6e9d737110926dac29"}}, {"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 1, "content": "package org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"hash_id": "11c2aa7677ee27ba83e9ec4f2aadb4a84e42eeb107d19a01f965556196122acd"}}]}], "golden_chunks": [{"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 1, "content": "package org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"hash_id": "11c2aa7677ee27ba83e9ec4f2aadb4a84e42eeb107d19a01f965556196122acd"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-servable-lib/src/main/java/org/apache/flink/ml/common/param/HasWeightCol.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": "codebases/jsonl"}}
+{"query": "What is the name and description of the weight column parameter?", "answer": "The weight column parameter is defined as `Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);`. Its name is \"weightCol\" and the description is \"Weight column name.\"", "golden_doc_uuids": ["9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65"], "golden_chunk_uuids": [["9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", 1]], "golden_documents": [{"uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-servable-lib/src/main/java/org/apache/flink/ml/common/param/HasWeightCol.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "7e756bd99ae845cea7322d2982674ad2569ee7269319db6e9d737110926dac29"}}, {"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 1, "content": "package org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"hash_id": "11c2aa7677ee27ba83e9ec4f2aadb4a84e42eeb107d19a01f965556196122acd"}}]}], "golden_chunks": [{"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 1, "content": "package org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"hash_id": "11c2aa7677ee27ba83e9ec4f2aadb4a84e42eeb107d19a01f965556196122acd"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-servable-lib/src/main/java/org/apache/flink/ml/common/param/HasWeightCol.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": "codebases/jsonl"}}
+{"query": "How can I get the value of the weight column parameter?", "answer": "The value of the weight column parameter can be retrieved using the `getWeightCol()` method, which is defined as `default String getWeightCol() { return get(WEIGHT_COL); }`.", "golden_doc_uuids": ["9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65"], "golden_chunk_uuids": [["9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", 1]], "golden_documents": [{"uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-servable-lib/src/main/java/org/apache/flink/ml/common/param/HasWeightCol.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "7e756bd99ae845cea7322d2982674ad2569ee7269319db6e9d737110926dac29"}}, {"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 1, "content": "package org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"hash_id": "11c2aa7677ee27ba83e9ec4f2aadb4a84e42eeb107d19a01f965556196122acd"}}]}], "golden_chunks": [{"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 1, "content": "package org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"hash_id": "11c2aa7677ee27ba83e9ec4f2aadb4a84e42eeb107d19a01f965556196122acd"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-servable-lib/src/main/java/org/apache/flink/ml/common/param/HasWeightCol.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": "codebases/jsonl"}}
+{"query": "What is the default value of the weight column parameter?", "answer": "The default value of the weight column parameter is null, as specified in the parameter definition: `new StringParam(\"weightCol\", \"Weight column name.\", null)`.", "golden_doc_uuids": ["9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65"], "golden_chunk_uuids": [["9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", 1]], "golden_documents": [{"uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-servable-lib/src/main/java/org/apache/flink/ml/common/param/HasWeightCol.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "7e756bd99ae845cea7322d2982674ad2569ee7269319db6e9d737110926dac29"}}, {"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 1, "content": "package org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"hash_id": "11c2aa7677ee27ba83e9ec4f2aadb4a84e42eeb107d19a01f965556196122acd"}}]}], "golden_chunks": [{"doc_uuid": "9ab68bb3dcc5b2e4bcb65ed03b08b261652c4930960efafea6380ca48c450c65", "index": 1, "content": "package org.apache.flink.ml.common.param;\n\nimport org.apache.flink.ml.param.Param;\nimport org.apache.flink.ml.param.StringParam;\nimport org.apache.flink.ml.param.WithParams;\n\n/**\n * Interface for the shared weight column param. If this is not set, we treat all instance weights\n * as 1.0.\n */\npublic interface HasWeightCol<T> extends WithParams<T> {\n Param<String> WEIGHT_COL = new StringParam(\"weightCol\", \"Weight column name.\", null);\n\n default String getWeightCol() {\n return get(WEIGHT_COL);\n }\n\n default T setWeightCol(String colName) {\n return set(WEIGHT_COL, colName);\n }\n}\n", "meta": {"hash_id": "11c2aa7677ee27ba83e9ec4f2aadb4a84e42eeb107d19a01f965556196122acd"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-servable-lib/src/main/java/org/apache/flink/ml/common/param/HasWeightCol.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": "codebases/jsonl"}}
+{"query": "What parameters does the VectorSlicer class have?", "answer": "The VectorSlicer class inherits parameters from the _VectorSlicerParams mixin class. This includes the INDICES parameter, as seen in the code:\n```\nINDICES: Param[Tuple[int, ...]] = IntArrayParam(\n \"indices\",\n \"An array of indices to select features from a vector column.\",\n None,\n indices_validator(None))\n```\nIt also inherits the inputCol and outputCol parameters from the HasInputCol and HasOutputCol mixins.", "golden_doc_uuids": ["139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de"], "golden_chunk_uuids": [["139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", 1]], "golden_documents": [{"uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. 
The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\nfrom typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n \"\"\"\n Params for :class:`VectorSlicer`.\n \"\"\"\n\n INDICES: Param[Tuple[int, ...]] = IntArrayParam(\n \"indices\",\n \"An array of indices to select features from a vector column.\",\n None,\n indices_validator(None))\n\n def __init__(self, java_params):\n super(_VectorSlicerParams, self).__init__(java_params)\n\n def set_indices(self, *ind: int):\n return self.set(self.INDICES, ind)\n\n def get_indices(self) -> Tuple[int, ...]:\n return self.get(self.INDICES)\n\n @property\n def indices(self) -> Tuple[int, ...]:\n return self.get_indices()\n\n\nclass VectorSlicer(JavaFeatureTransformer, _VectorSlicerParams):\n \"\"\"\n A Transformer that transforms a vector to a new feature, which is a sub-array of\n the original feature.It is useful for extracting features from a given vector.\n\n Note that duplicate features are not allowed, so there can be no overlap between\n selected indices. If the max value of the indices is greater than the size of\n the input vector, it throws an IllegalArgumentException.\n \"\"\"\n\n def __init__(self, java_model=None):\n super(VectorSlicer, self).__init__(java_model)\n\n @classmethod\n def _java_transformer_package_name(cls) -> str:\n return \"vectorslicer\"\n\n @classmethod\n def _java_transformer_class_name(cls) -> str:\n return \"VectorSlicer\"\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/vectorslicer.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 0, "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n", "meta": {"hash_id": "91ad85108c2506721611db8bc2d595f9df09304d9fd165af2f17cd2d1f6176c0"}}, {"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 1, "content": "from typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n", "meta": {"hash_id": "e6f756f2996140728e5c3aafd86d5727b685d3e2a337e9442d048a1ae5bd2de2"}}, {"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 2, "content": " \"\"\"\n Params for :class:`VectorSlicer`.\n \"\"\"\n\n INDICES: Param[Tuple[int, ...]] = IntArrayParam(\n \"indices\",\n \"An array of indices to select features from a vector column.\",\n None,\n indices_validator(None))\n\n def __init__(self, java_params):\n super(_VectorSlicerParams, self).__init__(java_params)\n\n def set_indices(self, *ind: int):\n return self.set(self.INDICES, ind)\n\n def get_indices(self) -> Tuple[int, ...]:\n return self.get(self.INDICES)\n\n @property\n def indices(self) -> Tuple[int, ...]:\n return self.get_indices()\n\n", "meta": {"hash_id": "fd2abdddd593aaa006d10052b4964a4b0f3c7b3c8da7a258450d49bc20f4763d"}}, {"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 3, "content": "\nclass VectorSlicer(JavaFeatureTransformer, _VectorSlicerParams):\n \"\"\"\n A Transformer that transforms a vector to a new feature, which is a sub-array of\n the original feature.It is useful for extracting features from a given vector.\n\n Note that duplicate features are not allowed, so there can be no overlap between\n selected indices. 
If the max value of the indices is greater than the size of\n the input vector, it throws an IllegalArgumentException.\n \"\"\"\n\n def __init__(self, java_model=None):\n super(VectorSlicer, self).__init__(java_model)\n\n @classmethod\n def _java_transformer_package_name(cls) -> str:\n return \"vectorslicer\"\n\n @classmethod\n def _java_transformer_class_name(cls) -> str:\n return \"VectorSlicer\"\n", "meta": {"hash_id": "d9b6bb8fc7414d39ac0078222556461f568f57e9d8ccf8edcbd77778130e9346"}}]}], "golden_chunks": [{"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 1, "content": "from typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n", "meta": {"hash_id": "e6f756f2996140728e5c3aafd86d5727b685d3e2a337e9442d048a1ae5bd2de2"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/vectorslicer.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "How do I specify the indices to slice from the input vector?", "answer": "To specify the indices to slice, you can use the `set_indices` method or set the `indices` property on a VectorSlicer instance. For example:\n```\nslicer = VectorSlicer()\nslicer.set_indices(1, 3, 5)\n# or equivalently\nslicer.indices = (1, 3, 5)\n```\nThis will configure the VectorSlicer to select elements at indices 1, 3, and 5 from the input vector.", "golden_doc_uuids": ["139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de"], "golden_chunk_uuids": [["139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", 1]], "golden_documents": [{"uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\nfrom typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n \"\"\"\n Params for :class:`VectorSlicer`.\n \"\"\"\n\n INDICES: Param[Tuple[int, ...]] = IntArrayParam(\n \"indices\",\n \"An array of indices to select features from a vector column.\",\n None,\n indices_validator(None))\n\n def __init__(self, java_params):\n super(_VectorSlicerParams, self).__init__(java_params)\n\n def set_indices(self, *ind: int):\n return self.set(self.INDICES, ind)\n\n def get_indices(self) -> Tuple[int, ...]:\n return self.get(self.INDICES)\n\n @property\n def indices(self) -> Tuple[int, ...]:\n return self.get_indices()\n\n\nclass VectorSlicer(JavaFeatureTransformer, _VectorSlicerParams):\n \"\"\"\n A Transformer that transforms a vector to a new feature, which is a sub-array of\n the original feature.It is useful for extracting features from a given vector.\n\n Note that duplicate features are not allowed, so there can be no overlap between\n selected indices. If the max value of the indices is greater than the size of\n the input vector, it throws an IllegalArgumentException.\n \"\"\"\n\n def __init__(self, java_model=None):\n super(VectorSlicer, self).__init__(java_model)\n\n @classmethod\n def _java_transformer_package_name(cls) -> str:\n return \"vectorslicer\"\n\n @classmethod\n def _java_transformer_class_name(cls) -> str:\n return \"VectorSlicer\"\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/vectorslicer.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 0, "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n", "meta": {"hash_id": "91ad85108c2506721611db8bc2d595f9df09304d9fd165af2f17cd2d1f6176c0"}}, {"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 1, "content": "from typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n", "meta": {"hash_id": "e6f756f2996140728e5c3aafd86d5727b685d3e2a337e9442d048a1ae5bd2de2"}}, {"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 2, "content": " \"\"\"\n Params for :class:`VectorSlicer`.\n \"\"\"\n\n INDICES: Param[Tuple[int, ...]] = IntArrayParam(\n \"indices\",\n \"An array of indices to select features from a vector column.\",\n None,\n indices_validator(None))\n\n def __init__(self, java_params):\n super(_VectorSlicerParams, self).__init__(java_params)\n\n def set_indices(self, *ind: int):\n return self.set(self.INDICES, ind)\n\n def get_indices(self) -> Tuple[int, ...]:\n return self.get(self.INDICES)\n\n @property\n def indices(self) -> Tuple[int, ...]:\n return self.get_indices()\n\n", "meta": {"hash_id": "fd2abdddd593aaa006d10052b4964a4b0f3c7b3c8da7a258450d49bc20f4763d"}}, {"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 3, "content": "\nclass VectorSlicer(JavaFeatureTransformer, _VectorSlicerParams):\n \"\"\"\n A Transformer that transforms a vector to a new feature, which is a sub-array of\n the original feature.It is useful for extracting features from a given vector.\n\n Note that duplicate features are not allowed, so there can be no overlap between\n selected indices. 
If the max value of the indices is greater than the size of\n the input vector, it throws an IllegalArgumentException.\n \"\"\"\n\n def __init__(self, java_model=None):\n super(VectorSlicer, self).__init__(java_model)\n\n @classmethod\n def _java_transformer_package_name(cls) -> str:\n return \"vectorslicer\"\n\n @classmethod\n def _java_transformer_class_name(cls) -> str:\n return \"VectorSlicer\"\n", "meta": {"hash_id": "d9b6bb8fc7414d39ac0078222556461f568f57e9d8ccf8edcbd77778130e9346"}}]}], "golden_chunks": [{"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 1, "content": "from typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n", "meta": {"hash_id": "e6f756f2996140728e5c3aafd86d5727b685d3e2a337e9442d048a1ae5bd2de2"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/vectorslicer.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "What restrictions are there on the indices parameter?", "answer": "The indices parameter has a few restrictions, which are enforced by the IndicesValidator class:\n1. All indices must be non-negative. \"If val < 0: return False\"\n2. Indices must be unique. \"indices_set = set(indices) if len(indices_set) != len(indices): return False\" \n3. At least one index must be specified. \"return len(indices_set) != 0\"\n\nAdditionally, the class docstring mentions: \"If the max value of the indices is greater than the size of the input vector, it throws an IllegalArgumentException.\"", "golden_doc_uuids": ["139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de"], "golden_chunk_uuids": [["139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", 1]], "golden_documents": [{"uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\nfrom typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n \"\"\"\n Params for :class:`VectorSlicer`.\n \"\"\"\n\n INDICES: Param[Tuple[int, ...]] = IntArrayParam(\n \"indices\",\n \"An array of indices to select features from a vector column.\",\n None,\n indices_validator(None))\n\n def __init__(self, java_params):\n super(_VectorSlicerParams, self).__init__(java_params)\n\n def set_indices(self, *ind: int):\n return self.set(self.INDICES, ind)\n\n def get_indices(self) -> Tuple[int, ...]:\n return self.get(self.INDICES)\n\n @property\n def indices(self) -> Tuple[int, ...]:\n return self.get_indices()\n\n\nclass VectorSlicer(JavaFeatureTransformer, _VectorSlicerParams):\n \"\"\"\n A Transformer that transforms a vector to a new feature, which is a sub-array of\n the original feature.It is useful for extracting features from a given vector.\n\n Note that duplicate features are not allowed, so there can be no overlap between\n selected indices. If the max value of the indices is greater than the size of\n the input vector, it throws an IllegalArgumentException.\n \"\"\"\n\n def __init__(self, java_model=None):\n super(VectorSlicer, self).__init__(java_model)\n\n @classmethod\n def _java_transformer_package_name(cls) -> str:\n return \"vectorslicer\"\n\n @classmethod\n def _java_transformer_class_name(cls) -> str:\n return \"VectorSlicer\"\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/vectorslicer.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 0, "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n", "meta": {"hash_id": "91ad85108c2506721611db8bc2d595f9df09304d9fd165af2f17cd2d1f6176c0"}}, {"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 1, "content": "from typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n", "meta": {"hash_id": "e6f756f2996140728e5c3aafd86d5727b685d3e2a337e9442d048a1ae5bd2de2"}}, {"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 2, "content": " \"\"\"\n Params for :class:`VectorSlicer`.\n \"\"\"\n\n INDICES: Param[Tuple[int, ...]] = IntArrayParam(\n \"indices\",\n \"An array of indices to select features from a vector column.\",\n None,\n indices_validator(None))\n\n def __init__(self, java_params):\n super(_VectorSlicerParams, self).__init__(java_params)\n\n def set_indices(self, *ind: int):\n return self.set(self.INDICES, ind)\n\n def get_indices(self) -> Tuple[int, ...]:\n return self.get(self.INDICES)\n\n @property\n def indices(self) -> Tuple[int, ...]:\n return self.get_indices()\n\n", "meta": {"hash_id": "fd2abdddd593aaa006d10052b4964a4b0f3c7b3c8da7a258450d49bc20f4763d"}}, {"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 3, "content": "\nclass VectorSlicer(JavaFeatureTransformer, _VectorSlicerParams):\n \"\"\"\n A Transformer that transforms a vector to a new feature, which is a sub-array of\n the original feature.It is useful for extracting features from a given vector.\n\n Note that duplicate features are not allowed, so there can be no overlap between\n selected indices. 
If the max value of the indices is greater than the size of\n the input vector, it throws an IllegalArgumentException.\n \"\"\"\n\n def __init__(self, java_model=None):\n super(VectorSlicer, self).__init__(java_model)\n\n @classmethod\n def _java_transformer_package_name(cls) -> str:\n return \"vectorslicer\"\n\n @classmethod\n def _java_transformer_class_name(cls) -> str:\n return \"VectorSlicer\"\n", "meta": {"hash_id": "d9b6bb8fc7414d39ac0078222556461f568f57e9d8ccf8edcbd77778130e9346"}}]}], "golden_chunks": [{"doc_uuid": "139c40956655d0ac49fe337fed8ae7d4e2faaa694b327c7758b84d11169b02de", "index": 1, "content": "from typing import Tuple\nfrom pyflink.ml.wrapper import JavaWithParams\nfrom pyflink.ml.param import IntArrayParam, ParamValidator\nfrom pyflink.ml.feature.common import JavaFeatureTransformer\nfrom pyflink.ml.common.param import HasInputCol, HasOutputCol, Param\n\n\nclass _VectorSlicerParams(\n JavaWithParams,\n HasInputCol,\n HasOutputCol\n):\n \"\"\"\n Checks the indices parameter.\n \"\"\"\n\n def indices_validator(self) -> ParamValidator[Tuple[int]]:\n class IndicesValidator(ParamValidator[Tuple[int]]):\n def validate(self, indices: Tuple[int]) -> bool:\n for val in indices:\n if val < 0:\n return False\n return True\n indices_set = set(indices)\n if len(indices_set) != len(indices):\n return False\n return len(indices_set) != 0\n return IndicesValidator()\n\n", "meta": {"hash_id": "e6f756f2996140728e5c3aafd86d5727b685d3e2a337e9442d048a1ae5bd2de2"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/vectorslicer.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "How does an operator or UDF get access to the current epoch number?", "answer": "An operator or UDF that implements the EpochAware interface must define the setEpochSupplier method. This method takes a Supplier parameter which will supply the current epoch number when invoked. The comment explains: \"Operators or UDF implements this method would be provided with an supplier that provides the current rounds of the current element.\"", "golden_doc_uuids": ["883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd"], "golden_chunk_uuids": [["883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd", 1]], "golden_documents": [{"uuid": "883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.iteration.operator.allround;\n\nimport org.apache.flink.annotation.Internal;\n\nimport java.util.function.Supplier;\n\n/**\n * Operators or UDF implements this method would be provided with an supplier that provides the\n * current rounds of the current element.\n */\n@Internal\npublic interface EpochAware {\n\n void setEpochSupplier(Supplier epochSupplier);\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-iteration/flink-ml-iteration-common/src/main/java/org/apache/flink/iteration/operator/allround/EpochAware.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "47459cde86431d9bc0511871af2e6417629b834a9fda3a2bb307aeaf615d34b6"}}, {"doc_uuid": "883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd", "index": 1, "content": "package org.apache.flink.iteration.operator.allround;\n\nimport org.apache.flink.annotation.Internal;\n\nimport java.util.function.Supplier;\n\n/**\n * Operators or UDF implements this method would be provided with an supplier that provides the\n * current rounds of the current element.\n */\n@Internal\npublic interface EpochAware {\n\n void setEpochSupplier(Supplier epochSupplier);\n}\n", "meta": {"hash_id": "87fc96fc93f404a3b2dbb1cc905b5442f1f9ced7cd5f82a744e85a5cb065c4ba"}}]}], "golden_chunks": [{"doc_uuid": "883cd05fef37ca96b99dcde818574a8d83e19acd38638df12532709f6d7183fd", "index": 1, "content": "package org.apache.flink.iteration.operator.allround;\n\nimport org.apache.flink.annotation.Internal;\n\nimport java.util.function.Supplier;\n\n/**\n * Operators or UDF implements this method would be provided with an supplier that provides the\n * current rounds of the current element.\n */\n@Internal\npublic interface EpochAware {\n\n void setEpochSupplier(Supplier epochSupplier);\n}\n", "meta": {"hash_id": "87fc96fc93f404a3b2dbb1cc905b5442f1f9ced7cd5f82a744e85a5cb065c4ba"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-iteration/flink-ml-iteration-common/src/main/java/org/apache/flink/iteration/operator/allround/EpochAware.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": 
"codebases/jsonl"}} +{"query": "Are the vector and l2Norm fields mutable in the VectorWithNorm class?", "answer": "No, the `vector` and `l2Norm` fields in the `VectorWithNorm` class are declared as `public final`, which means they are public and cannot be reassigned once initialized in the constructor. This ensures that the `VectorWithNorm` objects are immutable once created.", "golden_doc_uuids": ["8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5"], "golden_chunk_uuids": [["8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5", 1]], "golden_documents": [{"uuid": "8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. See the License for the\n * specific language governing permissions and limitations\n * under the License.\n */\n\npackage org.apache.flink.ml.linalg;\n\nimport org.apache.flink.api.common.typeinfo.TypeInfo;\nimport org.apache.flink.ml.linalg.typeinfo.VectorWithNormTypeInfoFactory;\n\n/** A vector with its norm. */\n@TypeInfo(VectorWithNormTypeInfoFactory.class)\npublic class VectorWithNorm {\n public final Vector vector;\n\n public final double l2Norm;\n\n public VectorWithNorm(Vector vector) {\n this(vector, BLAS.norm2(vector));\n }\n\n public VectorWithNorm(Vector vector, double l2Norm) {\n this.vector = vector;\n this.l2Norm = l2Norm;\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-servable-core/src/main/java/org/apache/flink/ml/linalg/VectorWithNorm.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. 
See the License for the\n * specific language governing permissions and limitations\n * under the License.\n */\n\n", "meta": {"hash_id": "336a61bd7ce44ac9e08adb86d9530b7b2beadeb70a1d387733de7517a9b1ffb4"}}, {"doc_uuid": "8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5", "index": 1, "content": "package org.apache.flink.ml.linalg;\n\nimport org.apache.flink.api.common.typeinfo.TypeInfo;\nimport org.apache.flink.ml.linalg.typeinfo.VectorWithNormTypeInfoFactory;\n\n/** A vector with its norm. */\n@TypeInfo(VectorWithNormTypeInfoFactory.class)\npublic class VectorWithNorm {\n public final Vector vector;\n\n public final double l2Norm;\n\n public VectorWithNorm(Vector vector) {\n this(vector, BLAS.norm2(vector));\n }\n\n public VectorWithNorm(Vector vector, double l2Norm) {\n this.vector = vector;\n this.l2Norm = l2Norm;\n }\n}\n", "meta": {"hash_id": "fbb659e0008cb211bd956347ae907ecc92be1c7359f884a106be5f0c93bde288"}}]}], "golden_chunks": [{"doc_uuid": "8de4ca49de47a801aa268a6edb5afc9d1897e53a1a9772957719b5efe783cfc5", "index": 1, "content": "package org.apache.flink.ml.linalg;\n\nimport org.apache.flink.api.common.typeinfo.TypeInfo;\nimport org.apache.flink.ml.linalg.typeinfo.VectorWithNormTypeInfoFactory;\n\n/** A vector with its norm. */\n@TypeInfo(VectorWithNormTypeInfoFactory.class)\npublic class VectorWithNorm {\n public final Vector vector;\n\n public final double l2Norm;\n\n public VectorWithNorm(Vector vector) {\n this(vector, BLAS.norm2(vector));\n }\n\n public VectorWithNorm(Vector vector, double l2Norm) {\n this.vector = vector;\n this.l2Norm = l2Norm;\n }\n}\n", "meta": {"hash_id": "fbb659e0008cb211bd956347ae907ecc92be1c7359f884a106be5f0c93bde288"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-servable-core/src/main/java/org/apache/flink/ml/linalg/VectorWithNorm.java", "repo_name": "apache/flink-ml", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How do you set the parameters of the UnivariateFeatureSelector?", "answer": "You can set the parameters of the UnivariateFeatureSelector using the setter methods, such as:\n```python\nunivariate_feature_selector\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\\\n .set_selection_mode('fpr')\\\n .set_selection_threshold(0.01)\n```\nThis sets the features column to \"test_features\", label column to \"test_label\", output column to \"test_output\", feature type to \"continuous\", label type to \"categorical\", selection mode to \"fpr\", and selection threshold to 0.01.", "golden_doc_uuids": ["76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3"], "golden_chunk_uuids": [["76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", 7], ["76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", 1]], "golden_documents": [{"uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\nfrom typing import List\n\nfrom pyflink.common import Types\nfrom pyflink.ml.tests.test_utils import PyFlinkMLTestCase, update_existing_params\n\nfrom pyflink.ml.linalg import DenseVectorTypeInfo, Vectors\n\nfrom pyflink.ml.feature.univariatefeatureselector import UnivariateFeatureSelector, \\\n UnivariateFeatureSelectorModel\nfrom pyflink.table import Table\n\n\nclass UnivariateFeatureSelectorTest(PyFlinkMLTestCase):\n\n def setUp(self):\n super(UnivariateFeatureSelectorTest, self).setUp()\n self.input_table = self.t_env.from_data_stream(\n self.env.from_collection([\n (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,\n 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)),\n (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,\n 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)),\n (1, Vectors.dense(3.85692159e-01, -9.04639637e-01, 5.09782604e-02,\n 8.40043971e-01, 7.45977857e-01, 8.78402288e-01)),\n (1, Vectors.dense(1.36264353e+00, 2.62454094e-01, 7.96306202e-01,\n 6.14948000e-01, 7.44948187e-01, 9.74034830e-01)),\n (1, Vectors.dense(9.65874070e-01, 2.52773665e+00, -2.19380094e+00,\n 2.33408080e-01, 1.86340919e-01, 8.23390433e-01)),\n (2, Vectors.dense(1.12324305e+01, -2.77121515e-01, 1.12740513e-01,\n 2.35184013e-01, 3.46668895e-01, 9.38500782e-02)),\n (2, Vectors.dense(1.06195839e+01, -1.82891238e+00, 2.25085601e-01,\n 9.09979851e-01, 6.80257535e-02, 8.24017480e-01)),\n (2, Vectors.dense(1.12806837e+01, 1.30686889e+00, 9.32839108e-02,\n 3.49784755e-01, 1.71322408e-02, 7.48465194e-02)),\n (2, Vectors.dense(9.98689462e+00, 9.50808938e-01, -2.90786359e-01,\n 2.31253009e-01, 7.46270968e-01, 1.60308169e-01)),\n (2, Vectors.dense(1.08428551e+01, -1.02749936e+00, 1.73951508e-01,\n 8.92482744e-02, 1.42651730e-01, 7.66751625e-01)),\n (3, Vectors.dense(-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,\n 8.22809049e-01, 3.26739456e-01, 7.88268404e-01)),\n (3, Vectors.dense(-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,\n 7.33931213e-01, 1.42554396e-01, 7.11225605e-01)),\n (3, Vectors.dense(-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,\n 2.51532056e-01, 2.82729807e-01, 7.16245686e-01)),\n (3, Vectors.dense(-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,\n 5.93898886e-01, 5.68425656e-01, 8.49762330e-01)),\n (3, Vectors.dense(7.63485129e-01, 1.02605138e+01, 1.32617719e+00,\n 5.49682879e-01, 8.59931442e-01, 4.88677978e-02)),\n (4, Vectors.dense(9.34900015e-01, 4.11379043e-01, 8.65010205e+00,\n 9.23509168e-01, 1.16995043e-01, 5.91894106e-03)),\n (4, Vectors.dense(4.73734933e-01, -1.48321181e+00, 9.73349621e+00,\n 4.09421563e-01, 5.09375719e-01, 5.93157850e-01)),\n (4, Vectors.dense(3.41470679e-01, -6.88972582e-01, 9.60347938e+00,\n 3.62654055e-01, 2.43437468e-01, 7.13052838e-01)),\n (4, Vectors.dense(-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,\n 8.24123861e-01, 5.84074506e-01, 6.54461558e-01)),\n (4, Vectors.dense(-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,\n 5.16336729e-01, 9.99776159e-01, 3.15769738e-01)),\n ],\n type_info=Types.ROW_NAMED(\n ['label', 
'features'],\n [Types.INT(), DenseVectorTypeInfo()])\n ))\n\n def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n univariate_feature_selector\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\\\n .set_selection_mode('fpr')\\\n .set_selection_threshold(0.01)\n self.assertEqual('test_features', univariate_feature_selector.features_col)\n self.assertEqual('test_label', univariate_feature_selector.label_col)\n self.assertEqual('test_output', univariate_feature_selector.output_col)\n self.assertEqual('continuous', univariate_feature_selector.feature_type)\n self.assertEqual('categorical', univariate_feature_selector.label_type)\n self.assertEqual('fpr', univariate_feature_selector.selection_mode)\n self.assertEqual(0.01, univariate_feature_selector.selection_threshold)\n\n def test_output_schema(self):\n selector = UnivariateFeatureSelector()\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\n temp_table = self.input_table.alias('test_label', 'test_features')\n model = selector.fit(temp_table)\n output = model.transform(temp_table)[0]\n self.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n\n def test_fit_and_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n output = model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_get_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n model_data = model.get_model_data()[0]\n self.assertEqual(['indices'], model_data.get_schema().get_field_names())\n\n model_rows = [result for result in\n self.t_env.to_data_stream(model_data).execute_and_collect()]\n self.assertEqual(1, len(model_rows))\n self.assertListEqual([0, 2, 1], model_rows[0][0])\n\n def test_set_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model_a = selector.fit(self.input_table)\n model_data = model_a.get_model_data()[0]\n\n model_b = UnivariateFeatureSelectorModel() \\\n .set_model_data(model_data)\n update_existing_params(model_b, model_a)\n\n output = 
model_b.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_save_load_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n reloaded_selector = self.save_and_reload(selector)\n model = reloaded_selector.fit(self.input_table)\n reloaded_model = self.save_and_reload(model)\n output = reloaded_model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def verify_output_result(\n self, output: Table,\n field_names: List[str],\n feature_col: str,\n output_col: str,\n indices: List[int]):\n collected_results = [result for result in\n self.t_env.to_data_stream(output).execute_and_collect()]\n for item in collected_results:\n item.set_field_names(field_names)\n self.assertEqual(len(indices), item[output_col].size())\n for i in range(0, len(indices)):\n self.assertEqual(item[feature_col].get(indices[i]),\n item[output_col].get(i))\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/tests/test_univariatefeatureselector.py", "repo_name": "apache/flink-ml", "num_chunks": 14, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 0, "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\nfrom typing import List\n\n", "meta": {"hash_id": "b31962033fa586825bf7dc1108a65126d2752cd6392cf54a36062da92f2e3a6f"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 1, "content": "from pyflink.common import Types\nfrom pyflink.ml.tests.test_utils import PyFlinkMLTestCase, update_existing_params\n\nfrom pyflink.ml.linalg import DenseVectorTypeInfo, Vectors\n\nfrom pyflink.ml.feature.univariatefeatureselector import UnivariateFeatureSelector, \\\n UnivariateFeatureSelectorModel\nfrom pyflink.table import Table\n\n\nclass UnivariateFeatureSelectorTest(PyFlinkMLTestCase):\n\n def setUp(self):\n super(UnivariateFeatureSelectorTest, self).setUp()\n self.input_table = self.t_env.from_data_stream(\n self.env.from_collection([\n (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,\n 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)),\n (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,\n 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)),\n", "meta": {"hash_id": "3f07304e3da358d44e8b2398f46a4c6857e611cefcce9097c6e8dba5c37387ad"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 2, "content": " (1, Vectors.dense(3.85692159e-01, -9.04639637e-01, 5.09782604e-02,\n 8.40043971e-01, 7.45977857e-01, 8.78402288e-01)),\n (1, Vectors.dense(1.36264353e+00, 2.62454094e-01, 7.96306202e-01,\n 6.14948000e-01, 7.44948187e-01, 9.74034830e-01)),\n (1, Vectors.dense(9.65874070e-01, 2.52773665e+00, -2.19380094e+00,\n 2.33408080e-01, 1.86340919e-01, 8.23390433e-01)),\n", "meta": {"hash_id": "f48261532f9d7b96723c5162e8e5a1d4035b2e2e02dab27ac4c892609051bb8c"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 3, "content": " (2, Vectors.dense(1.12324305e+01, -2.77121515e-01, 1.12740513e-01,\n 2.35184013e-01, 3.46668895e-01, 9.38500782e-02)),\n (2, Vectors.dense(1.06195839e+01, -1.82891238e+00, 2.25085601e-01,\n 9.09979851e-01, 6.80257535e-02, 8.24017480e-01)),\n (2, Vectors.dense(1.12806837e+01, 1.30686889e+00, 9.32839108e-02,\n 3.49784755e-01, 1.71322408e-02, 7.48465194e-02)),\n (2, Vectors.dense(9.98689462e+00, 9.50808938e-01, -2.90786359e-01,\n 2.31253009e-01, 7.46270968e-01, 1.60308169e-01)),\n", "meta": {"hash_id": "81ecfb1423426e5d39b62ed71b6951e7811e5cb955e27f44ad75cbb782205da8"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 4, "content": " (2, Vectors.dense(1.08428551e+01, -1.02749936e+00, 1.73951508e-01,\n 8.92482744e-02, 1.42651730e-01, 7.66751625e-01)),\n (3, Vectors.dense(-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,\n 8.22809049e-01, 3.26739456e-01, 7.88268404e-01)),\n (3, Vectors.dense(-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,\n 7.33931213e-01, 1.42554396e-01, 7.11225605e-01)),\n (3, Vectors.dense(-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,\n 2.51532056e-01, 2.82729807e-01, 7.16245686e-01)),\n", "meta": {"hash_id": "1443122eb758620816d890e6249989171d635d85bf53dc557b9677305cc75747"}}, {"doc_uuid": 
"76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 5, "content": " (3, Vectors.dense(-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,\n 5.93898886e-01, 5.68425656e-01, 8.49762330e-01)),\n (3, Vectors.dense(7.63485129e-01, 1.02605138e+01, 1.32617719e+00,\n 5.49682879e-01, 8.59931442e-01, 4.88677978e-02)),\n (4, Vectors.dense(9.34900015e-01, 4.11379043e-01, 8.65010205e+00,\n 9.23509168e-01, 1.16995043e-01, 5.91894106e-03)),\n (4, Vectors.dense(4.73734933e-01, -1.48321181e+00, 9.73349621e+00,\n 4.09421563e-01, 5.09375719e-01, 5.93157850e-01)),\n (4, Vectors.dense(3.41470679e-01, -6.88972582e-01, 9.60347938e+00,\n 3.62654055e-01, 2.43437468e-01, 7.13052838e-01)),\n", "meta": {"hash_id": "1eab267ee9a593a0116c4325aa9379bb3f0103a3badfab05e865e8d0cb90a151"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 6, "content": " (4, Vectors.dense(-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,\n 8.24123861e-01, 5.84074506e-01, 6.54461558e-01)),\n (4, Vectors.dense(-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,\n 5.16336729e-01, 9.99776159e-01, 3.15769738e-01)),\n ],\n type_info=Types.ROW_NAMED(\n ['label', 'features'],\n [Types.INT(), DenseVectorTypeInfo()])\n ))\n\n", "meta": {"hash_id": "facd1f797a4456f1d7adae5401aa5aa857c56d9ddbed51f547dd1ac12a825301"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 7, "content": " def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n", "meta": {"hash_id": "62f4fd05184e743e9e760f17bdadc21d6471c3c318f1ad97a79be31b74b86df9"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 8, "content": " univariate_feature_selector\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\\\n .set_selection_mode('fpr')\\\n .set_selection_threshold(0.01)\n self.assertEqual('test_features', univariate_feature_selector.features_col)\n self.assertEqual('test_label', univariate_feature_selector.label_col)\n self.assertEqual('test_output', univariate_feature_selector.output_col)\n self.assertEqual('continuous', univariate_feature_selector.feature_type)\n self.assertEqual('categorical', univariate_feature_selector.label_type)\n self.assertEqual('fpr', univariate_feature_selector.selection_mode)\n self.assertEqual(0.01, univariate_feature_selector.selection_threshold)\n\n", "meta": {"hash_id": "3bd81eb8dd6590cc3493d64654d0b31f91a8bfdf6b16e15b20107b0ef921c266"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 9, "content": " def test_output_schema(self):\n selector = UnivariateFeatureSelector()\\\n 
.set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\n temp_table = self.input_table.alias('test_label', 'test_features')\n model = selector.fit(temp_table)\n output = model.transform(temp_table)[0]\n self.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n\n", "meta": {"hash_id": "257b4f77605821d18e741445035c790c35df0183fe727622f8bc169e74f132b9"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 10, "content": " def test_fit_and_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n output = model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_get_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n model_data = model.get_model_data()[0]\n self.assertEqual(['indices'], model_data.get_schema().get_field_names())\n\n", "meta": {"hash_id": "f8d247c6a56dbd6270e3324abfdea68d84baac2983dbb57f0c03b70d4bef92ef"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 11, "content": " model_rows = [result for result in\n self.t_env.to_data_stream(model_data).execute_and_collect()]\n self.assertEqual(1, len(model_rows))\n self.assertListEqual([0, 2, 1], model_rows[0][0])\n\n def test_set_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model_a = selector.fit(self.input_table)\n model_data = model_a.get_model_data()[0]\n\n model_b = UnivariateFeatureSelectorModel() \\\n .set_model_data(model_data)\n update_existing_params(model_b, model_a)\n\n output = model_b.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n", "meta": {"hash_id": "3fd71881b39750005da2065ac81a568de876f875d8677b80c82734f67cffecf9"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 12, "content": " def test_save_load_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n reloaded_selector = self.save_and_reload(selector)\n model = reloaded_selector.fit(self.input_table)\n reloaded_model = self.save_and_reload(model)\n output = reloaded_model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n", "meta": {"hash_id": "05af33323e17ca072ce38eea36591fe9e3e569536e5025fade1eadc986f5d46a"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 13, "content": " def verify_output_result(\n self, output: Table,\n field_names: List[str],\n feature_col: str,\n output_col: str,\n indices: List[int]):\n collected_results = [result for result in\n 
self.t_env.to_data_stream(output).execute_and_collect()]\n for item in collected_results:\n item.set_field_names(field_names)\n self.assertEqual(len(indices), item[output_col].size())\n for i in range(0, len(indices)):\n self.assertEqual(item[feature_col].get(indices[i]),\n item[output_col].get(i))\n", "meta": {"hash_id": "ba4d7086a48c57435fe297439d7a76b53432a21c91ee54a71666a5e57e31ce5b"}}]}], "golden_chunks": [{"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 7, "content": " def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n", "meta": {"hash_id": "62f4fd05184e743e9e760f17bdadc21d6471c3c318f1ad97a79be31b74b86df9"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 1, "content": "from pyflink.common import Types\nfrom pyflink.ml.tests.test_utils import PyFlinkMLTestCase, update_existing_params\n\nfrom pyflink.ml.linalg import DenseVectorTypeInfo, Vectors\n\nfrom pyflink.ml.feature.univariatefeatureselector import UnivariateFeatureSelector, \\\n UnivariateFeatureSelectorModel\nfrom pyflink.table import Table\n\n\nclass UnivariateFeatureSelectorTest(PyFlinkMLTestCase):\n\n def setUp(self):\n super(UnivariateFeatureSelectorTest, self).setUp()\n self.input_table = self.t_env.from_data_stream(\n self.env.from_collection([\n (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,\n 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)),\n (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,\n 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)),\n", "meta": {"hash_id": "3f07304e3da358d44e8b2398f46a4c6857e611cefcce9097c6e8dba5c37387ad"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/tests/test_univariatefeatureselector.py", "repo_name": "apache/flink-ml", "num_chunks": 14, "source": "codebases/jsonl"}} +{"query": "What happens if you don't set the feature_type or label_type parameters of the UnivariateFeatureSelector?", "answer": "If you don't set the feature_type or label_type parameters, an exception will be raised when trying to access them. 
As shown in the test code:\n```python\nwith self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\nwith self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n```\nSo these parameters must be set before using the UnivariateFeatureSelector.", "golden_doc_uuids": ["76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3"], "golden_chunk_uuids": [["76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", 7]], "golden_documents": [{"uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\nfrom typing import List\n\nfrom pyflink.common import Types\nfrom pyflink.ml.tests.test_utils import PyFlinkMLTestCase, update_existing_params\n\nfrom pyflink.ml.linalg import DenseVectorTypeInfo, Vectors\n\nfrom pyflink.ml.feature.univariatefeatureselector import UnivariateFeatureSelector, \\\n UnivariateFeatureSelectorModel\nfrom pyflink.table import Table\n\n\nclass UnivariateFeatureSelectorTest(PyFlinkMLTestCase):\n\n def setUp(self):\n super(UnivariateFeatureSelectorTest, self).setUp()\n self.input_table = self.t_env.from_data_stream(\n self.env.from_collection([\n (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,\n 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)),\n (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,\n 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)),\n (1, Vectors.dense(3.85692159e-01, -9.04639637e-01, 5.09782604e-02,\n 8.40043971e-01, 7.45977857e-01, 8.78402288e-01)),\n (1, Vectors.dense(1.36264353e+00, 2.62454094e-01, 7.96306202e-01,\n 6.14948000e-01, 7.44948187e-01, 9.74034830e-01)),\n (1, Vectors.dense(9.65874070e-01, 2.52773665e+00, -2.19380094e+00,\n 2.33408080e-01, 1.86340919e-01, 8.23390433e-01)),\n (2, Vectors.dense(1.12324305e+01, -2.77121515e-01, 1.12740513e-01,\n 2.35184013e-01, 3.46668895e-01, 9.38500782e-02)),\n (2, Vectors.dense(1.06195839e+01, -1.82891238e+00, 2.25085601e-01,\n 9.09979851e-01, 6.80257535e-02, 8.24017480e-01)),\n (2, Vectors.dense(1.12806837e+01, 1.30686889e+00, 9.32839108e-02,\n 3.49784755e-01, 1.71322408e-02, 7.48465194e-02)),\n (2, Vectors.dense(9.98689462e+00, 9.50808938e-01, -2.90786359e-01,\n 2.31253009e-01, 7.46270968e-01, 1.60308169e-01)),\n (2, Vectors.dense(1.08428551e+01, -1.02749936e+00, 1.73951508e-01,\n 8.92482744e-02, 1.42651730e-01, 7.66751625e-01)),\n (3, 
Vectors.dense(-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,\n 8.22809049e-01, 3.26739456e-01, 7.88268404e-01)),\n (3, Vectors.dense(-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,\n 7.33931213e-01, 1.42554396e-01, 7.11225605e-01)),\n (3, Vectors.dense(-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,\n 2.51532056e-01, 2.82729807e-01, 7.16245686e-01)),\n (3, Vectors.dense(-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,\n 5.93898886e-01, 5.68425656e-01, 8.49762330e-01)),\n (3, Vectors.dense(7.63485129e-01, 1.02605138e+01, 1.32617719e+00,\n 5.49682879e-01, 8.59931442e-01, 4.88677978e-02)),\n (4, Vectors.dense(9.34900015e-01, 4.11379043e-01, 8.65010205e+00,\n 9.23509168e-01, 1.16995043e-01, 5.91894106e-03)),\n (4, Vectors.dense(4.73734933e-01, -1.48321181e+00, 9.73349621e+00,\n 4.09421563e-01, 5.09375719e-01, 5.93157850e-01)),\n (4, Vectors.dense(3.41470679e-01, -6.88972582e-01, 9.60347938e+00,\n 3.62654055e-01, 2.43437468e-01, 7.13052838e-01)),\n (4, Vectors.dense(-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,\n 8.24123861e-01, 5.84074506e-01, 6.54461558e-01)),\n (4, Vectors.dense(-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,\n 5.16336729e-01, 9.99776159e-01, 3.15769738e-01)),\n ],\n type_info=Types.ROW_NAMED(\n ['label', 'features'],\n [Types.INT(), DenseVectorTypeInfo()])\n ))\n\n def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n univariate_feature_selector\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\\\n .set_selection_mode('fpr')\\\n .set_selection_threshold(0.01)\n self.assertEqual('test_features', univariate_feature_selector.features_col)\n self.assertEqual('test_label', univariate_feature_selector.label_col)\n self.assertEqual('test_output', univariate_feature_selector.output_col)\n self.assertEqual('continuous', univariate_feature_selector.feature_type)\n self.assertEqual('categorical', univariate_feature_selector.label_type)\n self.assertEqual('fpr', univariate_feature_selector.selection_mode)\n self.assertEqual(0.01, univariate_feature_selector.selection_threshold)\n\n def test_output_schema(self):\n selector = UnivariateFeatureSelector()\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\n temp_table = self.input_table.alias('test_label', 'test_features')\n model = selector.fit(temp_table)\n output = model.transform(temp_table)[0]\n self.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n\n def test_fit_and_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n 
.set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n output = model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_get_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n model_data = model.get_model_data()[0]\n self.assertEqual(['indices'], model_data.get_schema().get_field_names())\n\n model_rows = [result for result in\n self.t_env.to_data_stream(model_data).execute_and_collect()]\n self.assertEqual(1, len(model_rows))\n self.assertListEqual([0, 2, 1], model_rows[0][0])\n\n def test_set_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model_a = selector.fit(self.input_table)\n model_data = model_a.get_model_data()[0]\n\n model_b = UnivariateFeatureSelectorModel() \\\n .set_model_data(model_data)\n update_existing_params(model_b, model_a)\n\n output = model_b.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_save_load_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n reloaded_selector = self.save_and_reload(selector)\n model = reloaded_selector.fit(self.input_table)\n reloaded_model = self.save_and_reload(model)\n output = reloaded_model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def verify_output_result(\n self, output: Table,\n field_names: List[str],\n feature_col: str,\n output_col: str,\n indices: List[int]):\n collected_results = [result for result in\n self.t_env.to_data_stream(output).execute_and_collect()]\n for item in collected_results:\n item.set_field_names(field_names)\n self.assertEqual(len(indices), item[output_col].size())\n for i in range(0, len(indices)):\n self.assertEqual(item[feature_col].get(indices[i]),\n item[output_col].get(i))\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/tests/test_univariatefeatureselector.py", "repo_name": "apache/flink-ml", "num_chunks": 14, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 0, "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\nfrom typing import List\n\n", "meta": {"hash_id": "b31962033fa586825bf7dc1108a65126d2752cd6392cf54a36062da92f2e3a6f"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 1, "content": "from pyflink.common import Types\nfrom pyflink.ml.tests.test_utils import PyFlinkMLTestCase, update_existing_params\n\nfrom pyflink.ml.linalg import DenseVectorTypeInfo, Vectors\n\nfrom pyflink.ml.feature.univariatefeatureselector import UnivariateFeatureSelector, \\\n UnivariateFeatureSelectorModel\nfrom pyflink.table import Table\n\n\nclass UnivariateFeatureSelectorTest(PyFlinkMLTestCase):\n\n def setUp(self):\n super(UnivariateFeatureSelectorTest, self).setUp()\n self.input_table = self.t_env.from_data_stream(\n self.env.from_collection([\n (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,\n 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)),\n (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,\n 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)),\n", "meta": {"hash_id": "3f07304e3da358d44e8b2398f46a4c6857e611cefcce9097c6e8dba5c37387ad"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 2, "content": " (1, Vectors.dense(3.85692159e-01, -9.04639637e-01, 5.09782604e-02,\n 8.40043971e-01, 7.45977857e-01, 8.78402288e-01)),\n (1, Vectors.dense(1.36264353e+00, 2.62454094e-01, 7.96306202e-01,\n 6.14948000e-01, 7.44948187e-01, 9.74034830e-01)),\n (1, Vectors.dense(9.65874070e-01, 2.52773665e+00, -2.19380094e+00,\n 2.33408080e-01, 1.86340919e-01, 8.23390433e-01)),\n", "meta": {"hash_id": "f48261532f9d7b96723c5162e8e5a1d4035b2e2e02dab27ac4c892609051bb8c"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 3, "content": " (2, Vectors.dense(1.12324305e+01, -2.77121515e-01, 1.12740513e-01,\n 2.35184013e-01, 3.46668895e-01, 9.38500782e-02)),\n (2, Vectors.dense(1.06195839e+01, -1.82891238e+00, 2.25085601e-01,\n 9.09979851e-01, 6.80257535e-02, 8.24017480e-01)),\n (2, Vectors.dense(1.12806837e+01, 1.30686889e+00, 9.32839108e-02,\n 3.49784755e-01, 1.71322408e-02, 7.48465194e-02)),\n (2, Vectors.dense(9.98689462e+00, 9.50808938e-01, -2.90786359e-01,\n 2.31253009e-01, 7.46270968e-01, 1.60308169e-01)),\n", "meta": {"hash_id": "81ecfb1423426e5d39b62ed71b6951e7811e5cb955e27f44ad75cbb782205da8"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 4, "content": " (2, Vectors.dense(1.08428551e+01, -1.02749936e+00, 1.73951508e-01,\n 8.92482744e-02, 1.42651730e-01, 7.66751625e-01)),\n (3, Vectors.dense(-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,\n 8.22809049e-01, 3.26739456e-01, 7.88268404e-01)),\n (3, Vectors.dense(-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,\n 7.33931213e-01, 1.42554396e-01, 7.11225605e-01)),\n (3, Vectors.dense(-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,\n 2.51532056e-01, 2.82729807e-01, 7.16245686e-01)),\n", "meta": {"hash_id": "1443122eb758620816d890e6249989171d635d85bf53dc557b9677305cc75747"}}, {"doc_uuid": 
"76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 5, "content": " (3, Vectors.dense(-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,\n 5.93898886e-01, 5.68425656e-01, 8.49762330e-01)),\n (3, Vectors.dense(7.63485129e-01, 1.02605138e+01, 1.32617719e+00,\n 5.49682879e-01, 8.59931442e-01, 4.88677978e-02)),\n (4, Vectors.dense(9.34900015e-01, 4.11379043e-01, 8.65010205e+00,\n 9.23509168e-01, 1.16995043e-01, 5.91894106e-03)),\n (4, Vectors.dense(4.73734933e-01, -1.48321181e+00, 9.73349621e+00,\n 4.09421563e-01, 5.09375719e-01, 5.93157850e-01)),\n (4, Vectors.dense(3.41470679e-01, -6.88972582e-01, 9.60347938e+00,\n 3.62654055e-01, 2.43437468e-01, 7.13052838e-01)),\n", "meta": {"hash_id": "1eab267ee9a593a0116c4325aa9379bb3f0103a3badfab05e865e8d0cb90a151"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 6, "content": " (4, Vectors.dense(-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,\n 8.24123861e-01, 5.84074506e-01, 6.54461558e-01)),\n (4, Vectors.dense(-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,\n 5.16336729e-01, 9.99776159e-01, 3.15769738e-01)),\n ],\n type_info=Types.ROW_NAMED(\n ['label', 'features'],\n [Types.INT(), DenseVectorTypeInfo()])\n ))\n\n", "meta": {"hash_id": "facd1f797a4456f1d7adae5401aa5aa857c56d9ddbed51f547dd1ac12a825301"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 7, "content": " def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n", "meta": {"hash_id": "62f4fd05184e743e9e760f17bdadc21d6471c3c318f1ad97a79be31b74b86df9"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 8, "content": " univariate_feature_selector\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\\\n .set_selection_mode('fpr')\\\n .set_selection_threshold(0.01)\n self.assertEqual('test_features', univariate_feature_selector.features_col)\n self.assertEqual('test_label', univariate_feature_selector.label_col)\n self.assertEqual('test_output', univariate_feature_selector.output_col)\n self.assertEqual('continuous', univariate_feature_selector.feature_type)\n self.assertEqual('categorical', univariate_feature_selector.label_type)\n self.assertEqual('fpr', univariate_feature_selector.selection_mode)\n self.assertEqual(0.01, univariate_feature_selector.selection_threshold)\n\n", "meta": {"hash_id": "3bd81eb8dd6590cc3493d64654d0b31f91a8bfdf6b16e15b20107b0ef921c266"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 9, "content": " def test_output_schema(self):\n selector = UnivariateFeatureSelector()\\\n 
.set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\n temp_table = self.input_table.alias('test_label', 'test_features')\n model = selector.fit(temp_table)\n output = model.transform(temp_table)[0]\n self.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n\n", "meta": {"hash_id": "257b4f77605821d18e741445035c790c35df0183fe727622f8bc169e74f132b9"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 10, "content": " def test_fit_and_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n output = model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_get_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n model_data = model.get_model_data()[0]\n self.assertEqual(['indices'], model_data.get_schema().get_field_names())\n\n", "meta": {"hash_id": "f8d247c6a56dbd6270e3324abfdea68d84baac2983dbb57f0c03b70d4bef92ef"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 11, "content": " model_rows = [result for result in\n self.t_env.to_data_stream(model_data).execute_and_collect()]\n self.assertEqual(1, len(model_rows))\n self.assertListEqual([0, 2, 1], model_rows[0][0])\n\n def test_set_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model_a = selector.fit(self.input_table)\n model_data = model_a.get_model_data()[0]\n\n model_b = UnivariateFeatureSelectorModel() \\\n .set_model_data(model_data)\n update_existing_params(model_b, model_a)\n\n output = model_b.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n", "meta": {"hash_id": "3fd71881b39750005da2065ac81a568de876f875d8677b80c82734f67cffecf9"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 12, "content": " def test_save_load_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n reloaded_selector = self.save_and_reload(selector)\n model = reloaded_selector.fit(self.input_table)\n reloaded_model = self.save_and_reload(model)\n output = reloaded_model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n", "meta": {"hash_id": "05af33323e17ca072ce38eea36591fe9e3e569536e5025fade1eadc986f5d46a"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 13, "content": " def verify_output_result(\n self, output: Table,\n field_names: List[str],\n feature_col: str,\n output_col: str,\n indices: List[int]):\n collected_results = [result for result in\n 
self.t_env.to_data_stream(output).execute_and_collect()]\n for item in collected_results:\n item.set_field_names(field_names)\n self.assertEqual(len(indices), item[output_col].size())\n for i in range(0, len(indices)):\n self.assertEqual(item[feature_col].get(indices[i]),\n item[output_col].get(i))\n", "meta": {"hash_id": "ba4d7086a48c57435fe297439d7a76b53432a21c91ee54a71666a5e57e31ce5b"}}]}], "golden_chunks": [{"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 7, "content": " def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n", "meta": {"hash_id": "62f4fd05184e743e9e760f17bdadc21d6471c3c318f1ad97a79be31b74b86df9"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/tests/test_univariatefeatureselector.py", "repo_name": "apache/flink-ml", "num_chunks": 14, "source": "codebases/jsonl"}} +{"query": "What is the output schema of the UnivariateFeatureSelectorModel after transforming data?", "answer": "After transforming data using the UnivariateFeatureSelectorModel, the output schema will contain the original label column, features column, and the new output column specified by the output_col parameter. This is verified in the test code:\n```python\nselector = UnivariateFeatureSelector()\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical') \nmodel = selector.fit(temp_table)\noutput = model.transform(temp_table)[0]\nself.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n```", "golden_doc_uuids": ["76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3"], "golden_chunk_uuids": [["76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", 9]], "golden_documents": [{"uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\nfrom typing import List\n\nfrom pyflink.common import Types\nfrom pyflink.ml.tests.test_utils import PyFlinkMLTestCase, update_existing_params\n\nfrom pyflink.ml.linalg import DenseVectorTypeInfo, Vectors\n\nfrom pyflink.ml.feature.univariatefeatureselector import UnivariateFeatureSelector, \\\n UnivariateFeatureSelectorModel\nfrom pyflink.table import Table\n\n\nclass UnivariateFeatureSelectorTest(PyFlinkMLTestCase):\n\n def setUp(self):\n super(UnivariateFeatureSelectorTest, self).setUp()\n self.input_table = self.t_env.from_data_stream(\n self.env.from_collection([\n (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,\n 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)),\n (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,\n 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)),\n (1, Vectors.dense(3.85692159e-01, -9.04639637e-01, 5.09782604e-02,\n 8.40043971e-01, 7.45977857e-01, 8.78402288e-01)),\n (1, Vectors.dense(1.36264353e+00, 2.62454094e-01, 7.96306202e-01,\n 6.14948000e-01, 7.44948187e-01, 9.74034830e-01)),\n (1, Vectors.dense(9.65874070e-01, 2.52773665e+00, -2.19380094e+00,\n 2.33408080e-01, 1.86340919e-01, 8.23390433e-01)),\n (2, Vectors.dense(1.12324305e+01, -2.77121515e-01, 1.12740513e-01,\n 2.35184013e-01, 3.46668895e-01, 9.38500782e-02)),\n (2, Vectors.dense(1.06195839e+01, -1.82891238e+00, 2.25085601e-01,\n 9.09979851e-01, 6.80257535e-02, 8.24017480e-01)),\n (2, Vectors.dense(1.12806837e+01, 1.30686889e+00, 9.32839108e-02,\n 3.49784755e-01, 1.71322408e-02, 7.48465194e-02)),\n (2, Vectors.dense(9.98689462e+00, 9.50808938e-01, -2.90786359e-01,\n 2.31253009e-01, 7.46270968e-01, 1.60308169e-01)),\n (2, Vectors.dense(1.08428551e+01, -1.02749936e+00, 1.73951508e-01,\n 8.92482744e-02, 1.42651730e-01, 7.66751625e-01)),\n (3, Vectors.dense(-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,\n 8.22809049e-01, 3.26739456e-01, 7.88268404e-01)),\n (3, Vectors.dense(-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,\n 7.33931213e-01, 1.42554396e-01, 7.11225605e-01)),\n (3, Vectors.dense(-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,\n 2.51532056e-01, 2.82729807e-01, 7.16245686e-01)),\n (3, Vectors.dense(-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,\n 5.93898886e-01, 5.68425656e-01, 8.49762330e-01)),\n (3, Vectors.dense(7.63485129e-01, 1.02605138e+01, 1.32617719e+00,\n 5.49682879e-01, 8.59931442e-01, 4.88677978e-02)),\n (4, Vectors.dense(9.34900015e-01, 4.11379043e-01, 8.65010205e+00,\n 9.23509168e-01, 1.16995043e-01, 5.91894106e-03)),\n (4, Vectors.dense(4.73734933e-01, -1.48321181e+00, 9.73349621e+00,\n 4.09421563e-01, 5.09375719e-01, 5.93157850e-01)),\n (4, Vectors.dense(3.41470679e-01, -6.88972582e-01, 9.60347938e+00,\n 3.62654055e-01, 2.43437468e-01, 7.13052838e-01)),\n (4, Vectors.dense(-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,\n 8.24123861e-01, 5.84074506e-01, 6.54461558e-01)),\n (4, Vectors.dense(-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,\n 5.16336729e-01, 9.99776159e-01, 3.15769738e-01)),\n ],\n type_info=Types.ROW_NAMED(\n ['label', 
'features'],\n [Types.INT(), DenseVectorTypeInfo()])\n ))\n\n def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n univariate_feature_selector\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\\\n .set_selection_mode('fpr')\\\n .set_selection_threshold(0.01)\n self.assertEqual('test_features', univariate_feature_selector.features_col)\n self.assertEqual('test_label', univariate_feature_selector.label_col)\n self.assertEqual('test_output', univariate_feature_selector.output_col)\n self.assertEqual('continuous', univariate_feature_selector.feature_type)\n self.assertEqual('categorical', univariate_feature_selector.label_type)\n self.assertEqual('fpr', univariate_feature_selector.selection_mode)\n self.assertEqual(0.01, univariate_feature_selector.selection_threshold)\n\n def test_output_schema(self):\n selector = UnivariateFeatureSelector()\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\n temp_table = self.input_table.alias('test_label', 'test_features')\n model = selector.fit(temp_table)\n output = model.transform(temp_table)[0]\n self.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n\n def test_fit_and_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n output = model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_get_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n model_data = model.get_model_data()[0]\n self.assertEqual(['indices'], model_data.get_schema().get_field_names())\n\n model_rows = [result for result in\n self.t_env.to_data_stream(model_data).execute_and_collect()]\n self.assertEqual(1, len(model_rows))\n self.assertListEqual([0, 2, 1], model_rows[0][0])\n\n def test_set_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model_a = selector.fit(self.input_table)\n model_data = model_a.get_model_data()[0]\n\n model_b = UnivariateFeatureSelectorModel() \\\n .set_model_data(model_data)\n update_existing_params(model_b, model_a)\n\n output = 
model_b.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_save_load_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n reloaded_selector = self.save_and_reload(selector)\n model = reloaded_selector.fit(self.input_table)\n reloaded_model = self.save_and_reload(model)\n output = reloaded_model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def verify_output_result(\n self, output: Table,\n field_names: List[str],\n feature_col: str,\n output_col: str,\n indices: List[int]):\n collected_results = [result for result in\n self.t_env.to_data_stream(output).execute_and_collect()]\n for item in collected_results:\n item.set_field_names(field_names)\n self.assertEqual(len(indices), item[output_col].size())\n for i in range(0, len(indices)):\n self.assertEqual(item[feature_col].get(indices[i]),\n item[output_col].get(i))\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/tests/test_univariatefeatureselector.py", "repo_name": "apache/flink-ml", "num_chunks": 14, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 0, "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\nfrom typing import List\n\n", "meta": {"hash_id": "b31962033fa586825bf7dc1108a65126d2752cd6392cf54a36062da92f2e3a6f"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 1, "content": "from pyflink.common import Types\nfrom pyflink.ml.tests.test_utils import PyFlinkMLTestCase, update_existing_params\n\nfrom pyflink.ml.linalg import DenseVectorTypeInfo, Vectors\n\nfrom pyflink.ml.feature.univariatefeatureselector import UnivariateFeatureSelector, \\\n UnivariateFeatureSelectorModel\nfrom pyflink.table import Table\n\n\nclass UnivariateFeatureSelectorTest(PyFlinkMLTestCase):\n\n def setUp(self):\n super(UnivariateFeatureSelectorTest, self).setUp()\n self.input_table = self.t_env.from_data_stream(\n self.env.from_collection([\n (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,\n 1.61408773e-01, 3.92492111e-01, 7.31240882e-01)),\n (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,\n 7.02154563e-01, 6.21348351e-01, 1.88397353e-01)),\n", "meta": {"hash_id": "3f07304e3da358d44e8b2398f46a4c6857e611cefcce9097c6e8dba5c37387ad"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 2, "content": " (1, Vectors.dense(3.85692159e-01, -9.04639637e-01, 5.09782604e-02,\n 8.40043971e-01, 7.45977857e-01, 8.78402288e-01)),\n (1, Vectors.dense(1.36264353e+00, 2.62454094e-01, 7.96306202e-01,\n 6.14948000e-01, 7.44948187e-01, 9.74034830e-01)),\n (1, Vectors.dense(9.65874070e-01, 2.52773665e+00, -2.19380094e+00,\n 2.33408080e-01, 1.86340919e-01, 8.23390433e-01)),\n", "meta": {"hash_id": "f48261532f9d7b96723c5162e8e5a1d4035b2e2e02dab27ac4c892609051bb8c"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 3, "content": " (2, Vectors.dense(1.12324305e+01, -2.77121515e-01, 1.12740513e-01,\n 2.35184013e-01, 3.46668895e-01, 9.38500782e-02)),\n (2, Vectors.dense(1.06195839e+01, -1.82891238e+00, 2.25085601e-01,\n 9.09979851e-01, 6.80257535e-02, 8.24017480e-01)),\n (2, Vectors.dense(1.12806837e+01, 1.30686889e+00, 9.32839108e-02,\n 3.49784755e-01, 1.71322408e-02, 7.48465194e-02)),\n (2, Vectors.dense(9.98689462e+00, 9.50808938e-01, -2.90786359e-01,\n 2.31253009e-01, 7.46270968e-01, 1.60308169e-01)),\n", "meta": {"hash_id": "81ecfb1423426e5d39b62ed71b6951e7811e5cb955e27f44ad75cbb782205da8"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 4, "content": " (2, Vectors.dense(1.08428551e+01, -1.02749936e+00, 1.73951508e-01,\n 8.92482744e-02, 1.42651730e-01, 7.66751625e-01)),\n (3, Vectors.dense(-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,\n 8.22809049e-01, 3.26739456e-01, 7.88268404e-01)),\n (3, Vectors.dense(-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,\n 7.33931213e-01, 1.42554396e-01, 7.11225605e-01)),\n (3, Vectors.dense(-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,\n 2.51532056e-01, 2.82729807e-01, 7.16245686e-01)),\n", "meta": {"hash_id": "1443122eb758620816d890e6249989171d635d85bf53dc557b9677305cc75747"}}, {"doc_uuid": 
"76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 5, "content": " (3, Vectors.dense(-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,\n 5.93898886e-01, 5.68425656e-01, 8.49762330e-01)),\n (3, Vectors.dense(7.63485129e-01, 1.02605138e+01, 1.32617719e+00,\n 5.49682879e-01, 8.59931442e-01, 4.88677978e-02)),\n (4, Vectors.dense(9.34900015e-01, 4.11379043e-01, 8.65010205e+00,\n 9.23509168e-01, 1.16995043e-01, 5.91894106e-03)),\n (4, Vectors.dense(4.73734933e-01, -1.48321181e+00, 9.73349621e+00,\n 4.09421563e-01, 5.09375719e-01, 5.93157850e-01)),\n (4, Vectors.dense(3.41470679e-01, -6.88972582e-01, 9.60347938e+00,\n 3.62654055e-01, 2.43437468e-01, 7.13052838e-01)),\n", "meta": {"hash_id": "1eab267ee9a593a0116c4325aa9379bb3f0103a3badfab05e865e8d0cb90a151"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 6, "content": " (4, Vectors.dense(-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,\n 8.24123861e-01, 5.84074506e-01, 6.54461558e-01)),\n (4, Vectors.dense(-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,\n 5.16336729e-01, 9.99776159e-01, 3.15769738e-01)),\n ],\n type_info=Types.ROW_NAMED(\n ['label', 'features'],\n [Types.INT(), DenseVectorTypeInfo()])\n ))\n\n", "meta": {"hash_id": "facd1f797a4456f1d7adae5401aa5aa857c56d9ddbed51f547dd1ac12a825301"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 7, "content": " def test_param(self):\n univariate_feature_selector = UnivariateFeatureSelector()\n self.assertEqual('features', univariate_feature_selector.features_col)\n self.assertEqual('label', univariate_feature_selector.label_col)\n self.assertEqual('output', univariate_feature_selector.output_col)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.feature_type\n self.assertTrue(\"Parameter featureType's value should not be null\" in context.exception)\n with self.assertRaises(Exception) as context:\n univariate_feature_selector.label_type\n self.assertTrue(\"Parameter labelType's value should not be null\" in context.exception)\n self.assertEqual('numTopFeatures', univariate_feature_selector.selection_mode)\n self.assertIsNone(univariate_feature_selector.selection_threshold)\n\n", "meta": {"hash_id": "62f4fd05184e743e9e760f17bdadc21d6471c3c318f1ad97a79be31b74b86df9"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 8, "content": " univariate_feature_selector\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\\\n .set_selection_mode('fpr')\\\n .set_selection_threshold(0.01)\n self.assertEqual('test_features', univariate_feature_selector.features_col)\n self.assertEqual('test_label', univariate_feature_selector.label_col)\n self.assertEqual('test_output', univariate_feature_selector.output_col)\n self.assertEqual('continuous', univariate_feature_selector.feature_type)\n self.assertEqual('categorical', univariate_feature_selector.label_type)\n self.assertEqual('fpr', univariate_feature_selector.selection_mode)\n self.assertEqual(0.01, univariate_feature_selector.selection_threshold)\n\n", "meta": {"hash_id": "3bd81eb8dd6590cc3493d64654d0b31f91a8bfdf6b16e15b20107b0ef921c266"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 9, "content": " def test_output_schema(self):\n selector = UnivariateFeatureSelector()\\\n 
.set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\n temp_table = self.input_table.alias('test_label', 'test_features')\n model = selector.fit(temp_table)\n output = model.transform(temp_table)[0]\n self.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n\n", "meta": {"hash_id": "257b4f77605821d18e741445035c790c35df0183fe727622f8bc169e74f132b9"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 10, "content": " def test_fit_and_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n output = model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n def test_get_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model = selector.fit(self.input_table)\n model_data = model.get_model_data()[0]\n self.assertEqual(['indices'], model_data.get_schema().get_field_names())\n\n", "meta": {"hash_id": "f8d247c6a56dbd6270e3324abfdea68d84baac2983dbb57f0c03b70d4bef92ef"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 11, "content": " model_rows = [result for result in\n self.t_env.to_data_stream(model_data).execute_and_collect()]\n self.assertEqual(1, len(model_rows))\n self.assertListEqual([0, 2, 1], model_rows[0][0])\n\n def test_set_model_data(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n model_a = selector.fit(self.input_table)\n model_data = model_a.get_model_data()[0]\n\n model_b = UnivariateFeatureSelectorModel() \\\n .set_model_data(model_data)\n update_existing_params(model_b, model_a)\n\n output = model_b.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n", "meta": {"hash_id": "3fd71881b39750005da2065ac81a568de876f875d8677b80c82734f67cffecf9"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 12, "content": " def test_save_load_predict(self):\n selector = UnivariateFeatureSelector() \\\n .set_feature_type('continuous') \\\n .set_label_type('categorical') \\\n .set_selection_threshold(3)\n reloaded_selector = self.save_and_reload(selector)\n model = reloaded_selector.fit(self.input_table)\n reloaded_model = self.save_and_reload(model)\n output = reloaded_model.transform(self.input_table)[0]\n self.verify_output_result(\n output,\n output.get_schema().get_field_names(),\n selector.get_features_col(),\n selector.get_output_col(),\n [0, 1, 2])\n\n", "meta": {"hash_id": "05af33323e17ca072ce38eea36591fe9e3e569536e5025fade1eadc986f5d46a"}}, {"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 13, "content": " def verify_output_result(\n self, output: Table,\n field_names: List[str],\n feature_col: str,\n output_col: str,\n indices: List[int]):\n collected_results = [result for result in\n 
self.t_env.to_data_stream(output).execute_and_collect()]\n for item in collected_results:\n item.set_field_names(field_names)\n self.assertEqual(len(indices), item[output_col].size())\n for i in range(0, len(indices)):\n self.assertEqual(item[feature_col].get(indices[i]),\n item[output_col].get(i))\n", "meta": {"hash_id": "ba4d7086a48c57435fe297439d7a76b53432a21c91ee54a71666a5e57e31ce5b"}}]}], "golden_chunks": [{"doc_uuid": "76ef0394d7b3d82a00d0fa83f34874957a0b215a66e3ceede34f013d0f607da3", "index": 9, "content": " def test_output_schema(self):\n selector = UnivariateFeatureSelector()\\\n .set_features_col(\"test_features\")\\\n .set_label_col('test_label')\\\n .set_output_col('test_output')\\\n .set_feature_type('continuous')\\\n .set_label_type('categorical')\n temp_table = self.input_table.alias('test_label', 'test_features')\n model = selector.fit(temp_table)\n output = model.transform(temp_table)[0]\n self.assertEqual(\n ['test_label', 'test_features', 'test_output'],\n output.get_schema().get_field_names())\n\n", "meta": {"hash_id": "257b4f77605821d18e741445035c790c35df0183fe727622f8bc169e74f132b9"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/ml/feature/tests/test_univariatefeatureselector.py", "repo_name": "apache/flink-ml", "num_chunks": 14, "source": "codebases/jsonl"}} +{"query": "How does the testParam() method verify the parameter settings of LinearRegression?", "answer": "The testParam() method creates a LinearRegression instance, checks the default parameter values, then sets new values for each parameter using the setter methods. It verifies that the updated parameter values are correctly set by calling the corresponding getter methods and comparing the returned values to the expected values, using assertions like assertEquals().", "golden_doc_uuids": ["05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7"], "golden_chunk_uuids": [["05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", 6]], "golden_documents": [{"uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.regression;\n\nimport org.apache.flink.api.common.typeinfo.TypeInformation;\nimport org.apache.flink.api.common.typeinfo.Types;\nimport org.apache.flink.api.java.typeutils.RowTypeInfo;\nimport org.apache.flink.ml.linalg.SparseVector;\nimport org.apache.flink.ml.linalg.Vectors;\nimport org.apache.flink.ml.linalg.typeinfo.DenseVectorTypeInfo;\nimport org.apache.flink.ml.regression.linearregression.LinearRegression;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModel;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModelData;\nimport org.apache.flink.ml.util.ParamUtils;\nimport org.apache.flink.ml.util.TestUtils;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.test.util.AbstractTestBase;\nimport org.apache.flink.types.Row;\n\nimport org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.lang3.RandomUtils;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\n\n/** Tests {@link LinearRegression} and {@link LinearRegressionModel}. 
*/\npublic class LinearRegressionTest extends AbstractTestBase {\n\n @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();\n\n private StreamExecutionEnvironment env;\n\n private StreamTableEnvironment tEnv;\n\n private static final List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(2, 4), 10.0, 1.0),\n Row.of(Vectors.dense(2, 2), 6.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(1, 2), 5.0, 1.0),\n Row.of(Vectors.dense(5, 3), 11.0, 1.0));\n\n private static final double[] expectedCoefficient = new double[] {1.141, 1.829};\n\n private static final double TOLERANCE = 1e-7;\n\n private static final double PREDICTION_TOLERANCE = 0.1;\n\n private static final double COEFFICIENT_TOLERANCE = 0.1;\n\n private Table trainDataTable;\n\n @Before\n public void before() {\n env = TestUtils.getExecutionEnvironment();\n tEnv = StreamTableEnvironment.create(env);\n Collections.shuffle(trainData);\n trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n }\n\n @SuppressWarnings(\"unchecked\")\n private void verifyPredictionResult(Table output, String labelCol, String predictionCol)\n throws Exception {\n List predResult = IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect());\n for (Row predictionRow : predResult) {\n double label = ((Number) predictionRow.getField(labelCol)).doubleValue();\n double prediction = (double) predictionRow.getField(predictionCol);\n assertTrue(Math.abs(prediction - label) / label < PREDICTION_TOLERANCE);\n }\n }\n\n @Test\n public void testParam() {\n LinearRegression linearRegression = new LinearRegression();\n assertEquals(\"features\", linearRegression.getFeaturesCol());\n assertEquals(\"label\", linearRegression.getLabelCol());\n assertNull(linearRegression.getWeightCol());\n assertEquals(20, linearRegression.getMaxIter());\n assertEquals(1e-6, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.1, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(32, linearRegression.getGlobalBatchSize());\n assertEquals(0, linearRegression.getReg(), TOLERANCE);\n assertEquals(0, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"prediction\", linearRegression.getPredictionCol());\n\n linearRegression\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setMaxIter(1000)\n .setTol(0.001)\n .setLearningRate(0.5)\n .setGlobalBatchSize(1000)\n .setReg(0.1)\n .setElasticNet(0.5)\n .setPredictionCol(\"test_predictionCol\");\n assertEquals(\"test_features\", linearRegression.getFeaturesCol());\n assertEquals(\"test_label\", linearRegression.getLabelCol());\n assertEquals(\"test_weight\", linearRegression.getWeightCol());\n assertEquals(1000, linearRegression.getMaxIter());\n assertEquals(0.001, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.5, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(1000, linearRegression.getGlobalBatchSize());\n assertEquals(0.1, linearRegression.getReg(), TOLERANCE);\n assertEquals(0.5, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"test_predictionCol\", linearRegression.getPredictionCol());\n }\n\n @Test\n public void testOutputSchema() {\n Table tempTable = 
trainDataTable.as(\"test_features\", \"test_label\", \"test_weight\");\n LinearRegression linearRegression =\n new LinearRegression()\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setPredictionCol(\"test_predictionCol\");\n Table output = linearRegression.fit(trainDataTable).transform(tempTable)[0];\n assertEquals(\n Arrays.asList(\"test_features\", \"test_label\", \"test_weight\", \"test_predictionCol\"),\n output.getResolvedSchema().getColumnNames());\n }\n\n @Test\n public void testFitAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testInputTypeConversion() throws Exception {\n trainDataTable = TestUtils.convertDataTypesToSparseInt(tEnv, trainDataTable);\n assertArrayEquals(\n new Class[] {SparseVector.class, Integer.class, Integer.class},\n TestUtils.getColumnDataTypes(trainDataTable));\n\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testSaveLoadAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n linearRegression =\n TestUtils.saveAndReload(\n tEnv,\n linearRegression,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegression::load);\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n model =\n TestUtils.saveAndReload(\n tEnv,\n model,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegressionModel::load);\n assertEquals(\n Collections.singletonList(\"coefficient\"),\n model.getModelData()[0].getResolvedSchema().getColumnNames());\n Table output = model.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testGetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n assertNotNull(modelData);\n assertEquals(1, modelData.size());\n assertArrayEquals(\n expectedCoefficient, modelData.get(0).coefficient.values, COEFFICIENT_TOLERANCE);\n }\n\n @Test\n public void testSetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n\n LinearRegressionModel newModel = new LinearRegressionModel();\n ParamUtils.updateExistingParams(newModel, model.getParamMap());\n newModel.setModelData(model.getModelData());\n Table output = newModel.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testMoreSubtaskThanData() throws Exception {\n List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0));\n\n Table trainDataTable =\n 
tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n\n LinearRegression linearRegression =\n new LinearRegression().setWeightCol(\"weight\").setGlobalBatchSize(128);\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testRegularization() throws Exception {\n checkRegularization(0, RandomUtils.nextDouble(0, 1), expectedCoefficient);\n checkRegularization(0.1, 0, new double[] {1.165, 1.780});\n checkRegularization(0.1, 1, new double[] {1.143, 1.812});\n checkRegularization(0.1, 0.5, new double[] {1.154, 1.796});\n }\n\n @SuppressWarnings(\"unchecked\")\n private void checkRegularization(double reg, double elasticNet, double[] expectedCoefficient)\n throws Exception {\n LinearRegressionModel model =\n new LinearRegression()\n .setWeightCol(\"weight\")\n .setReg(reg)\n .setElasticNet(elasticNet)\n .fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n final double errorTol = 1e-3;\n assertArrayEquals(expectedCoefficient, modelData.get(0).coefficient.values, errorTol);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-lib/src/test/java/org/apache/flink/ml/regression/LinearRegressionTest.java", "repo_name": "apache/flink-ml", "num_chunks": 19, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.regression;\n\n", "meta": {"hash_id": "f1f074c0fd078158148d151501d7946111aa5beae4ca844db35bd1a560aa6899"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 1, "content": "import org.apache.flink.api.common.typeinfo.TypeInformation;\nimport org.apache.flink.api.common.typeinfo.Types;\nimport org.apache.flink.api.java.typeutils.RowTypeInfo;\nimport org.apache.flink.ml.linalg.SparseVector;\nimport org.apache.flink.ml.linalg.Vectors;\nimport org.apache.flink.ml.linalg.typeinfo.DenseVectorTypeInfo;\nimport org.apache.flink.ml.regression.linearregression.LinearRegression;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModel;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModelData;\nimport org.apache.flink.ml.util.ParamUtils;\nimport org.apache.flink.ml.util.TestUtils;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.test.util.AbstractTestBase;\nimport org.apache.flink.types.Row;\n\n", "meta": {"hash_id": "4fd56a8bc9c186e18aeec2f650a620885587404ed33f5daca9c705369fcff695"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 2, "content": "import org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.lang3.RandomUtils;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\n\n/** Tests {@link LinearRegression} and {@link LinearRegressionModel}. 
*/\npublic class LinearRegressionTest extends AbstractTestBase {\n\n @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();\n\n private StreamExecutionEnvironment env;\n\n private StreamTableEnvironment tEnv;\n\n", "meta": {"hash_id": "d16579e4e457a704813821d7ac4bd4c402fd8149142cdc3c04eef5956de4b5c7"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 3, "content": " private static final List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(2, 4), 10.0, 1.0),\n Row.of(Vectors.dense(2, 2), 6.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(1, 2), 5.0, 1.0),\n Row.of(Vectors.dense(5, 3), 11.0, 1.0));\n\n private static final double[] expectedCoefficient = new double[] {1.141, 1.829};\n\n private static final double TOLERANCE = 1e-7;\n\n private static final double PREDICTION_TOLERANCE = 0.1;\n\n private static final double COEFFICIENT_TOLERANCE = 0.1;\n\n private Table trainDataTable;\n\n", "meta": {"hash_id": "26b71b5090239d240d373018cb847aee96d45794086a3eb62cca7bfc39c24815"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 4, "content": " @Before\n public void before() {\n env = TestUtils.getExecutionEnvironment();\n tEnv = StreamTableEnvironment.create(env);\n Collections.shuffle(trainData);\n trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n }\n\n", "meta": {"hash_id": "1726bfc5a0db30210e196b06f06052440399e3d3b7a8cd02d271316953bfec56"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 5, "content": " @SuppressWarnings(\"unchecked\")\n private void verifyPredictionResult(Table output, String labelCol, String predictionCol)\n throws Exception {\n List predResult = IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect());\n for (Row predictionRow : predResult) {\n double label = ((Number) predictionRow.getField(labelCol)).doubleValue();\n double prediction = (double) predictionRow.getField(predictionCol);\n assertTrue(Math.abs(prediction - label) / label < PREDICTION_TOLERANCE);\n }\n }\n\n", "meta": {"hash_id": "505d0ab105dc894e438c5253a226b485d17023f16e74f258ea680563b1bb3ed8"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 6, "content": " @Test\n public void testParam() {\n LinearRegression linearRegression = new LinearRegression();\n assertEquals(\"features\", linearRegression.getFeaturesCol());\n assertEquals(\"label\", linearRegression.getLabelCol());\n assertNull(linearRegression.getWeightCol());\n assertEquals(20, linearRegression.getMaxIter());\n assertEquals(1e-6, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.1, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(32, linearRegression.getGlobalBatchSize());\n assertEquals(0, linearRegression.getReg(), TOLERANCE);\n assertEquals(0, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"prediction\", linearRegression.getPredictionCol());\n\n", "meta": {"hash_id": "d781e76635abe0b93a702188b7240e31994bec50a525815a9eea0081345169eb"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 7, "content": " linearRegression\n 
.setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setMaxIter(1000)\n .setTol(0.001)\n .setLearningRate(0.5)\n .setGlobalBatchSize(1000)\n .setReg(0.1)\n .setElasticNet(0.5)\n .setPredictionCol(\"test_predictionCol\");\n assertEquals(\"test_features\", linearRegression.getFeaturesCol());\n assertEquals(\"test_label\", linearRegression.getLabelCol());\n assertEquals(\"test_weight\", linearRegression.getWeightCol());\n", "meta": {"hash_id": "8064169d6f95a943476903ff3cf7c41f3b964a4dbb978d0eb4004ce0fc24ae28"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 8, "content": " assertEquals(1000, linearRegression.getMaxIter());\n assertEquals(0.001, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.5, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(1000, linearRegression.getGlobalBatchSize());\n assertEquals(0.1, linearRegression.getReg(), TOLERANCE);\n assertEquals(0.5, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"test_predictionCol\", linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "db550ed25572b0a6a558212448f74d39c7728f27de1cfba35d8ef94d09378630"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 9, "content": " @Test\n public void testOutputSchema() {\n Table tempTable = trainDataTable.as(\"test_features\", \"test_label\", \"test_weight\");\n LinearRegression linearRegression =\n new LinearRegression()\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setPredictionCol(\"test_predictionCol\");\n Table output = linearRegression.fit(trainDataTable).transform(tempTable)[0];\n assertEquals(\n Arrays.asList(\"test_features\", \"test_label\", \"test_weight\", \"test_predictionCol\"),\n output.getResolvedSchema().getColumnNames());\n }\n\n", "meta": {"hash_id": "e55f66c2c1b2befb9f07c30623c3e3bad6aaf5c6326c131d953b15755ebc2534"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 10, "content": " @Test\n public void testFitAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "57591be2731619f3391370e78fd9fbabe5179630c39bf77aa5434b2cca26a517"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 11, "content": " @Test\n public void testInputTypeConversion() throws Exception {\n trainDataTable = TestUtils.convertDataTypesToSparseInt(tEnv, trainDataTable);\n assertArrayEquals(\n new Class[] {SparseVector.class, Integer.class, Integer.class},\n TestUtils.getColumnDataTypes(trainDataTable));\n\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "09fb53fff08e451999e42c7bb195d686a769c720d60be5e5bbd8334f77662cb1"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 12, "content": " @Test\n public void testSaveLoadAndPredict() throws Exception {\n LinearRegression linearRegression = new 
LinearRegression().setWeightCol(\"weight\");\n linearRegression =\n TestUtils.saveAndReload(\n tEnv,\n linearRegression,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegression::load);\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n", "meta": {"hash_id": "42f5333aad7d4c1cf308944ef835bfd73086e454ef58d3d37cf0f3c778c9345e"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 13, "content": " model =\n TestUtils.saveAndReload(\n tEnv,\n model,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegressionModel::load);\n assertEquals(\n Collections.singletonList(\"coefficient\"),\n model.getModelData()[0].getResolvedSchema().getColumnNames());\n Table output = model.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "84829f23d0af01b516e27b405e9a28e2bf2c6835ad1510ef023fd80496192731"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 14, "content": " @Test\n public void testGetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n assertNotNull(modelData);\n assertEquals(1, modelData.size());\n assertArrayEquals(\n expectedCoefficient, modelData.get(0).coefficient.values, COEFFICIENT_TOLERANCE);\n }\n\n", "meta": {"hash_id": "cdc28473bfc56d6b95126358c0107d18040933dc642eb5aaac51bf8b14a29d82"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 15, "content": " @Test\n public void testSetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n\n LinearRegressionModel newModel = new LinearRegressionModel();\n ParamUtils.updateExistingParams(newModel, model.getParamMap());\n newModel.setModelData(model.getModelData());\n Table output = newModel.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "e0638af5ebb01f03d45d0b53260048ec22bda4f28ccaa1d5ae89030cd5eca2ee"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 16, "content": " @Test\n public void testMoreSubtaskThanData() throws Exception {\n List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0));\n\n Table trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n\n", "meta": {"hash_id": "ce0c4297cfde091af04b47a3358737a6519da50f59e645c96d80c67ac4215006"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 17, "content": " LinearRegression linearRegression =\n new LinearRegression().setWeightCol(\"weight\").setGlobalBatchSize(128);\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void 
testRegularization() throws Exception {\n checkRegularization(0, RandomUtils.nextDouble(0, 1), expectedCoefficient);\n checkRegularization(0.1, 0, new double[] {1.165, 1.780});\n checkRegularization(0.1, 1, new double[] {1.143, 1.812});\n checkRegularization(0.1, 0.5, new double[] {1.154, 1.796});\n }\n\n", "meta": {"hash_id": "2dd55b2e215554da2906279d52021e9a1cc879bf5cdda1ea1e1b1d8618c0638a"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 18, "content": " @SuppressWarnings(\"unchecked\")\n private void checkRegularization(double reg, double elasticNet, double[] expectedCoefficient)\n throws Exception {\n LinearRegressionModel model =\n new LinearRegression()\n .setWeightCol(\"weight\")\n .setReg(reg)\n .setElasticNet(elasticNet)\n .fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n final double errorTol = 1e-3;\n assertArrayEquals(expectedCoefficient, modelData.get(0).coefficient.values, errorTol);\n }\n}\n", "meta": {"hash_id": "b830e30b1badc75f54cfa2bb258a6ace9e7efc2f13956df9772ad4dfe5ea3c30"}}]}], "golden_chunks": [{"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 6, "content": " @Test\n public void testParam() {\n LinearRegression linearRegression = new LinearRegression();\n assertEquals(\"features\", linearRegression.getFeaturesCol());\n assertEquals(\"label\", linearRegression.getLabelCol());\n assertNull(linearRegression.getWeightCol());\n assertEquals(20, linearRegression.getMaxIter());\n assertEquals(1e-6, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.1, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(32, linearRegression.getGlobalBatchSize());\n assertEquals(0, linearRegression.getReg(), TOLERANCE);\n assertEquals(0, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"prediction\", linearRegression.getPredictionCol());\n\n", "meta": {"hash_id": "d781e76635abe0b93a702188b7240e31994bec50a525815a9eea0081345169eb"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-lib/src/test/java/org/apache/flink/ml/regression/LinearRegressionTest.java", "repo_name": "apache/flink-ml", "num_chunks": 19, "source": "codebases/jsonl"}} +{"query": "What does the testInputTypeConversion() method verify?", "answer": "The testInputTypeConversion() method checks that LinearRegression can handle input data with sparse int vector features. It converts the dense vectors in the trainDataTable to sparse int vectors using TestUtils.convertDataTypesToSparseInt(). It then verifies the converted column data types and runs training and prediction with LinearRegression. Finally, it calls verifyPredictionResult() to check the prediction results, ensuring LinearRegression works correctly with the converted input types.", "golden_doc_uuids": ["05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7"], "golden_chunk_uuids": [["05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", 11]], "golden_documents": [{"uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. 
The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.regression;\n\nimport org.apache.flink.api.common.typeinfo.TypeInformation;\nimport org.apache.flink.api.common.typeinfo.Types;\nimport org.apache.flink.api.java.typeutils.RowTypeInfo;\nimport org.apache.flink.ml.linalg.SparseVector;\nimport org.apache.flink.ml.linalg.Vectors;\nimport org.apache.flink.ml.linalg.typeinfo.DenseVectorTypeInfo;\nimport org.apache.flink.ml.regression.linearregression.LinearRegression;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModel;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModelData;\nimport org.apache.flink.ml.util.ParamUtils;\nimport org.apache.flink.ml.util.TestUtils;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.test.util.AbstractTestBase;\nimport org.apache.flink.types.Row;\n\nimport org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.lang3.RandomUtils;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\n\n/** Tests {@link LinearRegression} and {@link LinearRegressionModel}. 
*/\npublic class LinearRegressionTest extends AbstractTestBase {\n\n @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();\n\n private StreamExecutionEnvironment env;\n\n private StreamTableEnvironment tEnv;\n\n private static final List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(2, 4), 10.0, 1.0),\n Row.of(Vectors.dense(2, 2), 6.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(1, 2), 5.0, 1.0),\n Row.of(Vectors.dense(5, 3), 11.0, 1.0));\n\n private static final double[] expectedCoefficient = new double[] {1.141, 1.829};\n\n private static final double TOLERANCE = 1e-7;\n\n private static final double PREDICTION_TOLERANCE = 0.1;\n\n private static final double COEFFICIENT_TOLERANCE = 0.1;\n\n private Table trainDataTable;\n\n @Before\n public void before() {\n env = TestUtils.getExecutionEnvironment();\n tEnv = StreamTableEnvironment.create(env);\n Collections.shuffle(trainData);\n trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n }\n\n @SuppressWarnings(\"unchecked\")\n private void verifyPredictionResult(Table output, String labelCol, String predictionCol)\n throws Exception {\n List predResult = IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect());\n for (Row predictionRow : predResult) {\n double label = ((Number) predictionRow.getField(labelCol)).doubleValue();\n double prediction = (double) predictionRow.getField(predictionCol);\n assertTrue(Math.abs(prediction - label) / label < PREDICTION_TOLERANCE);\n }\n }\n\n @Test\n public void testParam() {\n LinearRegression linearRegression = new LinearRegression();\n assertEquals(\"features\", linearRegression.getFeaturesCol());\n assertEquals(\"label\", linearRegression.getLabelCol());\n assertNull(linearRegression.getWeightCol());\n assertEquals(20, linearRegression.getMaxIter());\n assertEquals(1e-6, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.1, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(32, linearRegression.getGlobalBatchSize());\n assertEquals(0, linearRegression.getReg(), TOLERANCE);\n assertEquals(0, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"prediction\", linearRegression.getPredictionCol());\n\n linearRegression\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setMaxIter(1000)\n .setTol(0.001)\n .setLearningRate(0.5)\n .setGlobalBatchSize(1000)\n .setReg(0.1)\n .setElasticNet(0.5)\n .setPredictionCol(\"test_predictionCol\");\n assertEquals(\"test_features\", linearRegression.getFeaturesCol());\n assertEquals(\"test_label\", linearRegression.getLabelCol());\n assertEquals(\"test_weight\", linearRegression.getWeightCol());\n assertEquals(1000, linearRegression.getMaxIter());\n assertEquals(0.001, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.5, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(1000, linearRegression.getGlobalBatchSize());\n assertEquals(0.1, linearRegression.getReg(), TOLERANCE);\n assertEquals(0.5, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"test_predictionCol\", linearRegression.getPredictionCol());\n }\n\n @Test\n public void testOutputSchema() {\n Table tempTable = 
trainDataTable.as(\"test_features\", \"test_label\", \"test_weight\");\n LinearRegression linearRegression =\n new LinearRegression()\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setPredictionCol(\"test_predictionCol\");\n Table output = linearRegression.fit(trainDataTable).transform(tempTable)[0];\n assertEquals(\n Arrays.asList(\"test_features\", \"test_label\", \"test_weight\", \"test_predictionCol\"),\n output.getResolvedSchema().getColumnNames());\n }\n\n @Test\n public void testFitAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testInputTypeConversion() throws Exception {\n trainDataTable = TestUtils.convertDataTypesToSparseInt(tEnv, trainDataTable);\n assertArrayEquals(\n new Class[] {SparseVector.class, Integer.class, Integer.class},\n TestUtils.getColumnDataTypes(trainDataTable));\n\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testSaveLoadAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n linearRegression =\n TestUtils.saveAndReload(\n tEnv,\n linearRegression,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegression::load);\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n model =\n TestUtils.saveAndReload(\n tEnv,\n model,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegressionModel::load);\n assertEquals(\n Collections.singletonList(\"coefficient\"),\n model.getModelData()[0].getResolvedSchema().getColumnNames());\n Table output = model.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testGetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n assertNotNull(modelData);\n assertEquals(1, modelData.size());\n assertArrayEquals(\n expectedCoefficient, modelData.get(0).coefficient.values, COEFFICIENT_TOLERANCE);\n }\n\n @Test\n public void testSetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n\n LinearRegressionModel newModel = new LinearRegressionModel();\n ParamUtils.updateExistingParams(newModel, model.getParamMap());\n newModel.setModelData(model.getModelData());\n Table output = newModel.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testMoreSubtaskThanData() throws Exception {\n List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0));\n\n Table trainDataTable =\n 
tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n\n LinearRegression linearRegression =\n new LinearRegression().setWeightCol(\"weight\").setGlobalBatchSize(128);\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testRegularization() throws Exception {\n checkRegularization(0, RandomUtils.nextDouble(0, 1), expectedCoefficient);\n checkRegularization(0.1, 0, new double[] {1.165, 1.780});\n checkRegularization(0.1, 1, new double[] {1.143, 1.812});\n checkRegularization(0.1, 0.5, new double[] {1.154, 1.796});\n }\n\n @SuppressWarnings(\"unchecked\")\n private void checkRegularization(double reg, double elasticNet, double[] expectedCoefficient)\n throws Exception {\n LinearRegressionModel model =\n new LinearRegression()\n .setWeightCol(\"weight\")\n .setReg(reg)\n .setElasticNet(elasticNet)\n .fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n final double errorTol = 1e-3;\n assertArrayEquals(expectedCoefficient, modelData.get(0).coefficient.values, errorTol);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-lib/src/test/java/org/apache/flink/ml/regression/LinearRegressionTest.java", "repo_name": "apache/flink-ml", "num_chunks": 19, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.regression;\n\n", "meta": {"hash_id": "f1f074c0fd078158148d151501d7946111aa5beae4ca844db35bd1a560aa6899"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 1, "content": "import org.apache.flink.api.common.typeinfo.TypeInformation;\nimport org.apache.flink.api.common.typeinfo.Types;\nimport org.apache.flink.api.java.typeutils.RowTypeInfo;\nimport org.apache.flink.ml.linalg.SparseVector;\nimport org.apache.flink.ml.linalg.Vectors;\nimport org.apache.flink.ml.linalg.typeinfo.DenseVectorTypeInfo;\nimport org.apache.flink.ml.regression.linearregression.LinearRegression;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModel;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModelData;\nimport org.apache.flink.ml.util.ParamUtils;\nimport org.apache.flink.ml.util.TestUtils;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.test.util.AbstractTestBase;\nimport org.apache.flink.types.Row;\n\n", "meta": {"hash_id": "4fd56a8bc9c186e18aeec2f650a620885587404ed33f5daca9c705369fcff695"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 2, "content": "import org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.lang3.RandomUtils;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\n\n/** Tests {@link LinearRegression} and {@link LinearRegressionModel}. 
*/\npublic class LinearRegressionTest extends AbstractTestBase {\n\n @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();\n\n private StreamExecutionEnvironment env;\n\n private StreamTableEnvironment tEnv;\n\n", "meta": {"hash_id": "d16579e4e457a704813821d7ac4bd4c402fd8149142cdc3c04eef5956de4b5c7"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 3, "content": " private static final List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(2, 4), 10.0, 1.0),\n Row.of(Vectors.dense(2, 2), 6.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(1, 2), 5.0, 1.0),\n Row.of(Vectors.dense(5, 3), 11.0, 1.0));\n\n private static final double[] expectedCoefficient = new double[] {1.141, 1.829};\n\n private static final double TOLERANCE = 1e-7;\n\n private static final double PREDICTION_TOLERANCE = 0.1;\n\n private static final double COEFFICIENT_TOLERANCE = 0.1;\n\n private Table trainDataTable;\n\n", "meta": {"hash_id": "26b71b5090239d240d373018cb847aee96d45794086a3eb62cca7bfc39c24815"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 4, "content": " @Before\n public void before() {\n env = TestUtils.getExecutionEnvironment();\n tEnv = StreamTableEnvironment.create(env);\n Collections.shuffle(trainData);\n trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n }\n\n", "meta": {"hash_id": "1726bfc5a0db30210e196b06f06052440399e3d3b7a8cd02d271316953bfec56"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 5, "content": " @SuppressWarnings(\"unchecked\")\n private void verifyPredictionResult(Table output, String labelCol, String predictionCol)\n throws Exception {\n List predResult = IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect());\n for (Row predictionRow : predResult) {\n double label = ((Number) predictionRow.getField(labelCol)).doubleValue();\n double prediction = (double) predictionRow.getField(predictionCol);\n assertTrue(Math.abs(prediction - label) / label < PREDICTION_TOLERANCE);\n }\n }\n\n", "meta": {"hash_id": "505d0ab105dc894e438c5253a226b485d17023f16e74f258ea680563b1bb3ed8"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 6, "content": " @Test\n public void testParam() {\n LinearRegression linearRegression = new LinearRegression();\n assertEquals(\"features\", linearRegression.getFeaturesCol());\n assertEquals(\"label\", linearRegression.getLabelCol());\n assertNull(linearRegression.getWeightCol());\n assertEquals(20, linearRegression.getMaxIter());\n assertEquals(1e-6, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.1, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(32, linearRegression.getGlobalBatchSize());\n assertEquals(0, linearRegression.getReg(), TOLERANCE);\n assertEquals(0, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"prediction\", linearRegression.getPredictionCol());\n\n", "meta": {"hash_id": "d781e76635abe0b93a702188b7240e31994bec50a525815a9eea0081345169eb"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 7, "content": " linearRegression\n 
.setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setMaxIter(1000)\n .setTol(0.001)\n .setLearningRate(0.5)\n .setGlobalBatchSize(1000)\n .setReg(0.1)\n .setElasticNet(0.5)\n .setPredictionCol(\"test_predictionCol\");\n assertEquals(\"test_features\", linearRegression.getFeaturesCol());\n assertEquals(\"test_label\", linearRegression.getLabelCol());\n assertEquals(\"test_weight\", linearRegression.getWeightCol());\n", "meta": {"hash_id": "8064169d6f95a943476903ff3cf7c41f3b964a4dbb978d0eb4004ce0fc24ae28"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 8, "content": " assertEquals(1000, linearRegression.getMaxIter());\n assertEquals(0.001, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.5, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(1000, linearRegression.getGlobalBatchSize());\n assertEquals(0.1, linearRegression.getReg(), TOLERANCE);\n assertEquals(0.5, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"test_predictionCol\", linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "db550ed25572b0a6a558212448f74d39c7728f27de1cfba35d8ef94d09378630"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 9, "content": " @Test\n public void testOutputSchema() {\n Table tempTable = trainDataTable.as(\"test_features\", \"test_label\", \"test_weight\");\n LinearRegression linearRegression =\n new LinearRegression()\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setPredictionCol(\"test_predictionCol\");\n Table output = linearRegression.fit(trainDataTable).transform(tempTable)[0];\n assertEquals(\n Arrays.asList(\"test_features\", \"test_label\", \"test_weight\", \"test_predictionCol\"),\n output.getResolvedSchema().getColumnNames());\n }\n\n", "meta": {"hash_id": "e55f66c2c1b2befb9f07c30623c3e3bad6aaf5c6326c131d953b15755ebc2534"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 10, "content": " @Test\n public void testFitAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "57591be2731619f3391370e78fd9fbabe5179630c39bf77aa5434b2cca26a517"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 11, "content": " @Test\n public void testInputTypeConversion() throws Exception {\n trainDataTable = TestUtils.convertDataTypesToSparseInt(tEnv, trainDataTable);\n assertArrayEquals(\n new Class[] {SparseVector.class, Integer.class, Integer.class},\n TestUtils.getColumnDataTypes(trainDataTable));\n\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "09fb53fff08e451999e42c7bb195d686a769c720d60be5e5bbd8334f77662cb1"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 12, "content": " @Test\n public void testSaveLoadAndPredict() throws Exception {\n LinearRegression linearRegression = new 
LinearRegression().setWeightCol(\"weight\");\n linearRegression =\n TestUtils.saveAndReload(\n tEnv,\n linearRegression,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegression::load);\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n", "meta": {"hash_id": "42f5333aad7d4c1cf308944ef835bfd73086e454ef58d3d37cf0f3c778c9345e"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 13, "content": " model =\n TestUtils.saveAndReload(\n tEnv,\n model,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegressionModel::load);\n assertEquals(\n Collections.singletonList(\"coefficient\"),\n model.getModelData()[0].getResolvedSchema().getColumnNames());\n Table output = model.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "84829f23d0af01b516e27b405e9a28e2bf2c6835ad1510ef023fd80496192731"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 14, "content": " @Test\n public void testGetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n assertNotNull(modelData);\n assertEquals(1, modelData.size());\n assertArrayEquals(\n expectedCoefficient, modelData.get(0).coefficient.values, COEFFICIENT_TOLERANCE);\n }\n\n", "meta": {"hash_id": "cdc28473bfc56d6b95126358c0107d18040933dc642eb5aaac51bf8b14a29d82"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 15, "content": " @Test\n public void testSetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n\n LinearRegressionModel newModel = new LinearRegressionModel();\n ParamUtils.updateExistingParams(newModel, model.getParamMap());\n newModel.setModelData(model.getModelData());\n Table output = newModel.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "e0638af5ebb01f03d45d0b53260048ec22bda4f28ccaa1d5ae89030cd5eca2ee"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 16, "content": " @Test\n public void testMoreSubtaskThanData() throws Exception {\n List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0));\n\n Table trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n\n", "meta": {"hash_id": "ce0c4297cfde091af04b47a3358737a6519da50f59e645c96d80c67ac4215006"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 17, "content": " LinearRegression linearRegression =\n new LinearRegression().setWeightCol(\"weight\").setGlobalBatchSize(128);\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void 
testRegularization() throws Exception {\n checkRegularization(0, RandomUtils.nextDouble(0, 1), expectedCoefficient);\n checkRegularization(0.1, 0, new double[] {1.165, 1.780});\n checkRegularization(0.1, 1, new double[] {1.143, 1.812});\n checkRegularization(0.1, 0.5, new double[] {1.154, 1.796});\n }\n\n", "meta": {"hash_id": "2dd55b2e215554da2906279d52021e9a1cc879bf5cdda1ea1e1b1d8618c0638a"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 18, "content": " @SuppressWarnings(\"unchecked\")\n private void checkRegularization(double reg, double elasticNet, double[] expectedCoefficient)\n throws Exception {\n LinearRegressionModel model =\n new LinearRegression()\n .setWeightCol(\"weight\")\n .setReg(reg)\n .setElasticNet(elasticNet)\n .fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n final double errorTol = 1e-3;\n assertArrayEquals(expectedCoefficient, modelData.get(0).coefficient.values, errorTol);\n }\n}\n", "meta": {"hash_id": "b830e30b1badc75f54cfa2bb258a6ace9e7efc2f13956df9772ad4dfe5ea3c30"}}]}], "golden_chunks": [{"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 11, "content": " @Test\n public void testInputTypeConversion() throws Exception {\n trainDataTable = TestUtils.convertDataTypesToSparseInt(tEnv, trainDataTable);\n assertArrayEquals(\n new Class[] {SparseVector.class, Integer.class, Integer.class},\n TestUtils.getColumnDataTypes(trainDataTable));\n\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "09fb53fff08e451999e42c7bb195d686a769c720d60be5e5bbd8334f77662cb1"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-lib/src/test/java/org/apache/flink/ml/regression/LinearRegressionTest.java", "repo_name": "apache/flink-ml", "num_chunks": 19, "source": "codebases/jsonl"}} +{"query": "What does the testRegularization() method check?", "answer": "The testRegularization() method verifies that LinearRegression's L2 regularization and elastic net parameters work as expected. It calls the checkRegularization() method with different reg and elasticNet parameter values and expected model coefficients. checkRegularization() trains a LinearRegressionModel with the given parameters, gets the model coefficients, and verifies they match the expected values within an error tolerance using assertArrayEquals().", "golden_doc_uuids": ["05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7"], "golden_chunk_uuids": [["05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", 17]], "golden_documents": [{"uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
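The answer above summarizes checkRegularization(); a minimal Java sketch of the same verification follows, assuming a running StreamTableEnvironment and the (features, label, weight) trainDataTable built in before(). The expected coefficients come from the test's reg=0.1, elasticNet=0.5 case, and every call mirrors code that appears verbatim in the test.

    // Minimal sketch, assuming `trainDataTable` exists as in the test's before() method.
    LinearRegressionModel model =
            new LinearRegression()
                    .setWeightCol("weight")
                    .setReg(0.1)        // regularization strength
                    .setElasticNet(0.5) // 0 = pure L2 penalty, 1 = pure L1 penalty
                    .fit(trainDataTable);
    List<LinearRegressionModelData> modelData =
            IteratorUtils.toList(
                    LinearRegressionModelData.getModelDataStream(model.getModelData()[0])
                            .executeAndCollect());
    // With reg = 0.1 and elasticNet = 0.5 the test expects coefficients near {1.154, 1.796}.
    assertArrayEquals(new double[] {1.154, 1.796}, modelData.get(0).coefficient.values, 1e-3);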
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.regression;\n\nimport org.apache.flink.api.common.typeinfo.TypeInformation;\nimport org.apache.flink.api.common.typeinfo.Types;\nimport org.apache.flink.api.java.typeutils.RowTypeInfo;\nimport org.apache.flink.ml.linalg.SparseVector;\nimport org.apache.flink.ml.linalg.Vectors;\nimport org.apache.flink.ml.linalg.typeinfo.DenseVectorTypeInfo;\nimport org.apache.flink.ml.regression.linearregression.LinearRegression;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModel;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModelData;\nimport org.apache.flink.ml.util.ParamUtils;\nimport org.apache.flink.ml.util.TestUtils;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.test.util.AbstractTestBase;\nimport org.apache.flink.types.Row;\n\nimport org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.lang3.RandomUtils;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\n\n/** Tests {@link LinearRegression} and {@link LinearRegressionModel}. 
*/\npublic class LinearRegressionTest extends AbstractTestBase {\n\n @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();\n\n private StreamExecutionEnvironment env;\n\n private StreamTableEnvironment tEnv;\n\n private static final List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(2, 4), 10.0, 1.0),\n Row.of(Vectors.dense(2, 2), 6.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(1, 2), 5.0, 1.0),\n Row.of(Vectors.dense(5, 3), 11.0, 1.0));\n\n private static final double[] expectedCoefficient = new double[] {1.141, 1.829};\n\n private static final double TOLERANCE = 1e-7;\n\n private static final double PREDICTION_TOLERANCE = 0.1;\n\n private static final double COEFFICIENT_TOLERANCE = 0.1;\n\n private Table trainDataTable;\n\n @Before\n public void before() {\n env = TestUtils.getExecutionEnvironment();\n tEnv = StreamTableEnvironment.create(env);\n Collections.shuffle(trainData);\n trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n }\n\n @SuppressWarnings(\"unchecked\")\n private void verifyPredictionResult(Table output, String labelCol, String predictionCol)\n throws Exception {\n List predResult = IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect());\n for (Row predictionRow : predResult) {\n double label = ((Number) predictionRow.getField(labelCol)).doubleValue();\n double prediction = (double) predictionRow.getField(predictionCol);\n assertTrue(Math.abs(prediction - label) / label < PREDICTION_TOLERANCE);\n }\n }\n\n @Test\n public void testParam() {\n LinearRegression linearRegression = new LinearRegression();\n assertEquals(\"features\", linearRegression.getFeaturesCol());\n assertEquals(\"label\", linearRegression.getLabelCol());\n assertNull(linearRegression.getWeightCol());\n assertEquals(20, linearRegression.getMaxIter());\n assertEquals(1e-6, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.1, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(32, linearRegression.getGlobalBatchSize());\n assertEquals(0, linearRegression.getReg(), TOLERANCE);\n assertEquals(0, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"prediction\", linearRegression.getPredictionCol());\n\n linearRegression\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setMaxIter(1000)\n .setTol(0.001)\n .setLearningRate(0.5)\n .setGlobalBatchSize(1000)\n .setReg(0.1)\n .setElasticNet(0.5)\n .setPredictionCol(\"test_predictionCol\");\n assertEquals(\"test_features\", linearRegression.getFeaturesCol());\n assertEquals(\"test_label\", linearRegression.getLabelCol());\n assertEquals(\"test_weight\", linearRegression.getWeightCol());\n assertEquals(1000, linearRegression.getMaxIter());\n assertEquals(0.001, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.5, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(1000, linearRegression.getGlobalBatchSize());\n assertEquals(0.1, linearRegression.getReg(), TOLERANCE);\n assertEquals(0.5, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"test_predictionCol\", linearRegression.getPredictionCol());\n }\n\n @Test\n public void testOutputSchema() {\n Table tempTable = 
trainDataTable.as(\"test_features\", \"test_label\", \"test_weight\");\n LinearRegression linearRegression =\n new LinearRegression()\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setPredictionCol(\"test_predictionCol\");\n Table output = linearRegression.fit(trainDataTable).transform(tempTable)[0];\n assertEquals(\n Arrays.asList(\"test_features\", \"test_label\", \"test_weight\", \"test_predictionCol\"),\n output.getResolvedSchema().getColumnNames());\n }\n\n @Test\n public void testFitAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testInputTypeConversion() throws Exception {\n trainDataTable = TestUtils.convertDataTypesToSparseInt(tEnv, trainDataTable);\n assertArrayEquals(\n new Class[] {SparseVector.class, Integer.class, Integer.class},\n TestUtils.getColumnDataTypes(trainDataTable));\n\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testSaveLoadAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n linearRegression =\n TestUtils.saveAndReload(\n tEnv,\n linearRegression,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegression::load);\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n model =\n TestUtils.saveAndReload(\n tEnv,\n model,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegressionModel::load);\n assertEquals(\n Collections.singletonList(\"coefficient\"),\n model.getModelData()[0].getResolvedSchema().getColumnNames());\n Table output = model.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testGetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n assertNotNull(modelData);\n assertEquals(1, modelData.size());\n assertArrayEquals(\n expectedCoefficient, modelData.get(0).coefficient.values, COEFFICIENT_TOLERANCE);\n }\n\n @Test\n public void testSetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n\n LinearRegressionModel newModel = new LinearRegressionModel();\n ParamUtils.updateExistingParams(newModel, model.getParamMap());\n newModel.setModelData(model.getModelData());\n Table output = newModel.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testMoreSubtaskThanData() throws Exception {\n List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0));\n\n Table trainDataTable =\n 
tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n\n LinearRegression linearRegression =\n new LinearRegression().setWeightCol(\"weight\").setGlobalBatchSize(128);\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testRegularization() throws Exception {\n checkRegularization(0, RandomUtils.nextDouble(0, 1), expectedCoefficient);\n checkRegularization(0.1, 0, new double[] {1.165, 1.780});\n checkRegularization(0.1, 1, new double[] {1.143, 1.812});\n checkRegularization(0.1, 0.5, new double[] {1.154, 1.796});\n }\n\n @SuppressWarnings(\"unchecked\")\n private void checkRegularization(double reg, double elasticNet, double[] expectedCoefficient)\n throws Exception {\n LinearRegressionModel model =\n new LinearRegression()\n .setWeightCol(\"weight\")\n .setReg(reg)\n .setElasticNet(elasticNet)\n .fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n final double errorTol = 1e-3;\n assertArrayEquals(expectedCoefficient, modelData.get(0).coefficient.values, errorTol);\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-lib/src/test/java/org/apache/flink/ml/regression/LinearRegressionTest.java", "repo_name": "apache/flink-ml", "num_chunks": 19, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.regression;\n\n", "meta": {"hash_id": "f1f074c0fd078158148d151501d7946111aa5beae4ca844db35bd1a560aa6899"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 1, "content": "import org.apache.flink.api.common.typeinfo.TypeInformation;\nimport org.apache.flink.api.common.typeinfo.Types;\nimport org.apache.flink.api.java.typeutils.RowTypeInfo;\nimport org.apache.flink.ml.linalg.SparseVector;\nimport org.apache.flink.ml.linalg.Vectors;\nimport org.apache.flink.ml.linalg.typeinfo.DenseVectorTypeInfo;\nimport org.apache.flink.ml.regression.linearregression.LinearRegression;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModel;\nimport org.apache.flink.ml.regression.linearregression.LinearRegressionModelData;\nimport org.apache.flink.ml.util.ParamUtils;\nimport org.apache.flink.ml.util.TestUtils;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.test.util.AbstractTestBase;\nimport org.apache.flink.types.Row;\n\n", "meta": {"hash_id": "4fd56a8bc9c186e18aeec2f650a620885587404ed33f5daca9c705369fcff695"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 2, "content": "import org.apache.commons.collections.IteratorUtils;\nimport org.apache.commons.lang3.RandomUtils;\nimport org.junit.Before;\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\n\nimport static org.junit.Assert.assertArrayEquals;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertNotNull;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\n\n/** Tests {@link LinearRegression} and {@link LinearRegressionModel}. 
*/\npublic class LinearRegressionTest extends AbstractTestBase {\n\n @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();\n\n private StreamExecutionEnvironment env;\n\n private StreamTableEnvironment tEnv;\n\n", "meta": {"hash_id": "d16579e4e457a704813821d7ac4bd4c402fd8149142cdc3c04eef5956de4b5c7"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 3, "content": " private static final List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(2, 4), 10.0, 1.0),\n Row.of(Vectors.dense(2, 2), 6.0, 1.0),\n Row.of(Vectors.dense(4, 3), 10.0, 1.0),\n Row.of(Vectors.dense(1, 2), 5.0, 1.0),\n Row.of(Vectors.dense(5, 3), 11.0, 1.0));\n\n private static final double[] expectedCoefficient = new double[] {1.141, 1.829};\n\n private static final double TOLERANCE = 1e-7;\n\n private static final double PREDICTION_TOLERANCE = 0.1;\n\n private static final double COEFFICIENT_TOLERANCE = 0.1;\n\n private Table trainDataTable;\n\n", "meta": {"hash_id": "26b71b5090239d240d373018cb847aee96d45794086a3eb62cca7bfc39c24815"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 4, "content": " @Before\n public void before() {\n env = TestUtils.getExecutionEnvironment();\n tEnv = StreamTableEnvironment.create(env);\n Collections.shuffle(trainData);\n trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n }\n\n", "meta": {"hash_id": "1726bfc5a0db30210e196b06f06052440399e3d3b7a8cd02d271316953bfec56"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 5, "content": " @SuppressWarnings(\"unchecked\")\n private void verifyPredictionResult(Table output, String labelCol, String predictionCol)\n throws Exception {\n List predResult = IteratorUtils.toList(tEnv.toDataStream(output).executeAndCollect());\n for (Row predictionRow : predResult) {\n double label = ((Number) predictionRow.getField(labelCol)).doubleValue();\n double prediction = (double) predictionRow.getField(predictionCol);\n assertTrue(Math.abs(prediction - label) / label < PREDICTION_TOLERANCE);\n }\n }\n\n", "meta": {"hash_id": "505d0ab105dc894e438c5253a226b485d17023f16e74f258ea680563b1bb3ed8"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 6, "content": " @Test\n public void testParam() {\n LinearRegression linearRegression = new LinearRegression();\n assertEquals(\"features\", linearRegression.getFeaturesCol());\n assertEquals(\"label\", linearRegression.getLabelCol());\n assertNull(linearRegression.getWeightCol());\n assertEquals(20, linearRegression.getMaxIter());\n assertEquals(1e-6, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.1, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(32, linearRegression.getGlobalBatchSize());\n assertEquals(0, linearRegression.getReg(), TOLERANCE);\n assertEquals(0, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"prediction\", linearRegression.getPredictionCol());\n\n", "meta": {"hash_id": "d781e76635abe0b93a702188b7240e31994bec50a525815a9eea0081345169eb"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 7, "content": " linearRegression\n 
.setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setMaxIter(1000)\n .setTol(0.001)\n .setLearningRate(0.5)\n .setGlobalBatchSize(1000)\n .setReg(0.1)\n .setElasticNet(0.5)\n .setPredictionCol(\"test_predictionCol\");\n assertEquals(\"test_features\", linearRegression.getFeaturesCol());\n assertEquals(\"test_label\", linearRegression.getLabelCol());\n assertEquals(\"test_weight\", linearRegression.getWeightCol());\n", "meta": {"hash_id": "8064169d6f95a943476903ff3cf7c41f3b964a4dbb978d0eb4004ce0fc24ae28"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 8, "content": " assertEquals(1000, linearRegression.getMaxIter());\n assertEquals(0.001, linearRegression.getTol(), TOLERANCE);\n assertEquals(0.5, linearRegression.getLearningRate(), TOLERANCE);\n assertEquals(1000, linearRegression.getGlobalBatchSize());\n assertEquals(0.1, linearRegression.getReg(), TOLERANCE);\n assertEquals(0.5, linearRegression.getElasticNet(), TOLERANCE);\n assertEquals(\"test_predictionCol\", linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "db550ed25572b0a6a558212448f74d39c7728f27de1cfba35d8ef94d09378630"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 9, "content": " @Test\n public void testOutputSchema() {\n Table tempTable = trainDataTable.as(\"test_features\", \"test_label\", \"test_weight\");\n LinearRegression linearRegression =\n new LinearRegression()\n .setFeaturesCol(\"test_features\")\n .setLabelCol(\"test_label\")\n .setWeightCol(\"test_weight\")\n .setPredictionCol(\"test_predictionCol\");\n Table output = linearRegression.fit(trainDataTable).transform(tempTable)[0];\n assertEquals(\n Arrays.asList(\"test_features\", \"test_label\", \"test_weight\", \"test_predictionCol\"),\n output.getResolvedSchema().getColumnNames());\n }\n\n", "meta": {"hash_id": "e55f66c2c1b2befb9f07c30623c3e3bad6aaf5c6326c131d953b15755ebc2534"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 10, "content": " @Test\n public void testFitAndPredict() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "57591be2731619f3391370e78fd9fbabe5179630c39bf77aa5434b2cca26a517"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 11, "content": " @Test\n public void testInputTypeConversion() throws Exception {\n trainDataTable = TestUtils.convertDataTypesToSparseInt(tEnv, trainDataTable);\n assertArrayEquals(\n new Class[] {SparseVector.class, Integer.class, Integer.class},\n TestUtils.getColumnDataTypes(trainDataTable));\n\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "09fb53fff08e451999e42c7bb195d686a769c720d60be5e5bbd8334f77662cb1"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 12, "content": " @Test\n public void testSaveLoadAndPredict() throws Exception {\n LinearRegression linearRegression = new 
LinearRegression().setWeightCol(\"weight\");\n linearRegression =\n TestUtils.saveAndReload(\n tEnv,\n linearRegression,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegression::load);\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n", "meta": {"hash_id": "42f5333aad7d4c1cf308944ef835bfd73086e454ef58d3d37cf0f3c778c9345e"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 13, "content": " model =\n TestUtils.saveAndReload(\n tEnv,\n model,\n tempFolder.newFolder().getAbsolutePath(),\n LinearRegressionModel::load);\n assertEquals(\n Collections.singletonList(\"coefficient\"),\n model.getModelData()[0].getResolvedSchema().getColumnNames());\n Table output = model.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "84829f23d0af01b516e27b405e9a28e2bf2c6835ad1510ef023fd80496192731"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 14, "content": " @Test\n public void testGetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n assertNotNull(modelData);\n assertEquals(1, modelData.size());\n assertArrayEquals(\n expectedCoefficient, modelData.get(0).coefficient.values, COEFFICIENT_TOLERANCE);\n }\n\n", "meta": {"hash_id": "cdc28473bfc56d6b95126358c0107d18040933dc642eb5aaac51bf8b14a29d82"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 15, "content": " @Test\n public void testSetModelData() throws Exception {\n LinearRegression linearRegression = new LinearRegression().setWeightCol(\"weight\");\n LinearRegressionModel model = linearRegression.fit(trainDataTable);\n\n LinearRegressionModel newModel = new LinearRegressionModel();\n ParamUtils.updateExistingParams(newModel, model.getParamMap());\n newModel.setModelData(model.getModelData());\n Table output = newModel.transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n", "meta": {"hash_id": "e0638af5ebb01f03d45d0b53260048ec22bda4f28ccaa1d5ae89030cd5eca2ee"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 16, "content": " @Test\n public void testMoreSubtaskThanData() throws Exception {\n List trainData =\n Arrays.asList(\n Row.of(Vectors.dense(2, 1), 4.0, 1.0),\n Row.of(Vectors.dense(3, 2), 7.0, 1.0));\n\n Table trainDataTable =\n tEnv.fromDataStream(\n env.fromCollection(\n trainData,\n new RowTypeInfo(\n new TypeInformation[] {\n DenseVectorTypeInfo.INSTANCE, Types.DOUBLE, Types.DOUBLE\n },\n new String[] {\"features\", \"label\", \"weight\"})));\n\n", "meta": {"hash_id": "ce0c4297cfde091af04b47a3358737a6519da50f59e645c96d80c67ac4215006"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 17, "content": " LinearRegression linearRegression =\n new LinearRegression().setWeightCol(\"weight\").setGlobalBatchSize(128);\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void 
testRegularization() throws Exception {\n checkRegularization(0, RandomUtils.nextDouble(0, 1), expectedCoefficient);\n checkRegularization(0.1, 0, new double[] {1.165, 1.780});\n checkRegularization(0.1, 1, new double[] {1.143, 1.812});\n checkRegularization(0.1, 0.5, new double[] {1.154, 1.796});\n }\n\n", "meta": {"hash_id": "2dd55b2e215554da2906279d52021e9a1cc879bf5cdda1ea1e1b1d8618c0638a"}}, {"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 18, "content": " @SuppressWarnings(\"unchecked\")\n private void checkRegularization(double reg, double elasticNet, double[] expectedCoefficient)\n throws Exception {\n LinearRegressionModel model =\n new LinearRegression()\n .setWeightCol(\"weight\")\n .setReg(reg)\n .setElasticNet(elasticNet)\n .fit(trainDataTable);\n List modelData =\n IteratorUtils.toList(\n LinearRegressionModelData.getModelDataStream(model.getModelData()[0])\n .executeAndCollect());\n final double errorTol = 1e-3;\n assertArrayEquals(expectedCoefficient, modelData.get(0).coefficient.values, errorTol);\n }\n}\n", "meta": {"hash_id": "b830e30b1badc75f54cfa2bb258a6ace9e7efc2f13956df9772ad4dfe5ea3c30"}}]}], "golden_chunks": [{"doc_uuid": "05d6124b529ce8a984aa95f59f61ccb660082f18e91562e90a60221421a2fba7", "index": 17, "content": " LinearRegression linearRegression =\n new LinearRegression().setWeightCol(\"weight\").setGlobalBatchSize(128);\n Table output = linearRegression.fit(trainDataTable).transform(trainDataTable)[0];\n verifyPredictionResult(\n output, linearRegression.getLabelCol(), linearRegression.getPredictionCol());\n }\n\n @Test\n public void testRegularization() throws Exception {\n checkRegularization(0, RandomUtils.nextDouble(0, 1), expectedCoefficient);\n checkRegularization(0.1, 0, new double[] {1.165, 1.780});\n checkRegularization(0.1, 1, new double[] {1.143, 1.812});\n checkRegularization(0.1, 0.5, new double[] {1.154, 1.796});\n }\n\n", "meta": {"hash_id": "2dd55b2e215554da2906279d52021e9a1cc879bf5cdda1ea1e1b1d8618c0638a"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-lib/src/test/java/org/apache/flink/ml/regression/LinearRegressionTest.java", "repo_name": "apache/flink-ml", "num_chunks": 19, "source": "codebases/jsonl"}} +{"query": "What is the maximum size of a segment that the MemorySegmentWriter will write?", "answer": "The MemorySegmentWriter will not write segments larger than DataCacheWriter.MAX_SEGMENT_SIZE bytes. In the addRecord method:\n\n public boolean addRecord(T record) throws IOException {\n if (outputStream.getPos() >= DataCacheWriter.MAX_SEGMENT_SIZE) {\n return false;\n }\n ...\n }\n\nIf the current position in the output stream exceeds MAX_SEGMENT_SIZE, it will not write the record and will return false.", "golden_doc_uuids": ["7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6"], "golden_chunk_uuids": [["7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", 3]], "golden_documents": [{"uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
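The answer above notes that addRecord() refuses a record once the segment reaches DataCacheWriter.MAX_SEGMENT_SIZE, or when memory allocation fails. A hedged caller-side sketch follows; the actual rollover logic belongs to DataCacheWriter, which is not part of this record, so createNewSegmentWriter() and finishedSegments are hypothetical names.

    // Hedged sketch of reacting to the guard in addRecord(); the names below are assumed.
    if (!writer.addRecord(record)) {
        writer.finish().ifPresent(finishedSegments::add); // seal the full segment, if non-empty
        writer = createNewSegmentWriter();                // hypothetical: open the next segment
        if (!writer.addRecord(record)) {
            throw new IOException("record does not fit even in a fresh segment");
        }
    }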
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.iteration.datacache.nonkeyed;\n\nimport org.apache.flink.annotation.Internal;\nimport org.apache.flink.api.common.typeutils.TypeSerializer;\nimport org.apache.flink.core.fs.Path;\nimport org.apache.flink.core.memory.DataOutputView;\nimport org.apache.flink.core.memory.DataOutputViewStreamWrapper;\nimport org.apache.flink.core.memory.MemorySegment;\nimport org.apache.flink.runtime.memory.MemoryAllocationException;\nimport org.apache.flink.table.runtime.util.MemorySegmentPool;\n\nimport javax.annotation.Nullable;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Optional;\n\n/** A class that writes cache data to memory segments. */\n@Internal\nclass MemorySegmentWriter implements SegmentWriter {\n\n /** The tool to serialize received records into bytes. */\n private final TypeSerializer serializer;\n\n /** The pre-allocated path to hold cached records into the file system. */\n private final Path path;\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The output stream to write serialized content to memory segments. */\n private final ManagedMemoryOutputStream outputStream;\n\n /** The wrapper view of the output stream to be used with TypeSerializer API. */\n private final DataOutputView outputView;\n\n /** The number of records added so far. */\n private int count;\n\n MemorySegmentWriter(\n TypeSerializer serializer,\n Path path,\n MemorySegmentPool segmentPool,\n long expectedSize)\n throws MemoryAllocationException {\n this.serializer = serializer;\n this.path = path;\n this.segmentPool = segmentPool;\n this.outputStream = new ManagedMemoryOutputStream(segmentPool, expectedSize);\n this.outputView = new DataOutputViewStreamWrapper(outputStream);\n this.count = 0;\n }\n\n @Override\n public boolean addRecord(T record) throws IOException {\n if (outputStream.getPos() >= DataCacheWriter.MAX_SEGMENT_SIZE) {\n return false;\n }\n try {\n serializer.serialize(record, outputView);\n count++;\n return true;\n } catch (RuntimeException e) {\n if (e.getCause() instanceof MemoryAllocationException) {\n return false;\n }\n throw e;\n }\n }\n\n @Override\n public Optional finish() throws IOException {\n if (count > 0) {\n return Optional.of(new Segment(path, count, outputStream.getSegments()));\n } else {\n segmentPool.returnAll(outputStream.getSegments());\n return Optional.empty();\n }\n }\n\n /** An output stream subclass that accepts bytes and writes them to memory segments. */\n private static class ManagedMemoryOutputStream extends OutputStream {\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The number of bytes in a memory segment. */\n private final int pageSize;\n\n /** The memory segments containing written bytes. */\n private final List segments = new ArrayList<>();\n\n /** The index of the segment that currently accepts written bytes. */\n private int segmentIndex;\n\n /** The number of bytes in the current segment that have been written. 
*/\n private int segmentOffset;\n\n /** The number of bytes that have been written so far. */\n private long globalOffset;\n\n /** The number of bytes that have been allocated so far. */\n private long allocatedBytes;\n\n public ManagedMemoryOutputStream(MemorySegmentPool segmentPool, long expectedSize)\n throws MemoryAllocationException {\n this.segmentPool = segmentPool;\n this.pageSize = segmentPool.pageSize();\n ensureCapacity(Math.max(expectedSize, 1L));\n }\n\n public long getPos() {\n return globalOffset;\n }\n\n public List getSegments() {\n return segments;\n }\n\n @Override\n public void write(int b) throws IOException {\n write(new byte[] {(byte) b}, 0, 1);\n }\n\n @Override\n public void write(@Nullable byte[] b, int off, int len) throws IOException {\n try {\n ensureCapacity(globalOffset + len);\n } catch (MemoryAllocationException e) {\n throw new RuntimeException(e);\n }\n\n while (len > 0) {\n int currentLen = Math.min(len, pageSize - segmentOffset);\n segments.get(segmentIndex).put(segmentOffset, b, off, currentLen);\n segmentOffset += currentLen;\n globalOffset += currentLen;\n if (segmentOffset >= pageSize) {\n segmentIndex++;\n segmentOffset = 0;\n }\n off += currentLen;\n len -= currentLen;\n }\n }\n\n private void ensureCapacity(long capacity) throws MemoryAllocationException {\n if (allocatedBytes >= capacity) {\n return;\n }\n\n int required =\n (int) (capacity % pageSize == 0 ? capacity / pageSize : capacity / pageSize + 1)\n - segments.size();\n\n List allocatedSegments = new ArrayList<>();\n for (int i = 0; i < required; i++) {\n MemorySegment memorySegment = segmentPool.nextSegment();\n if (memorySegment == null) {\n segmentPool.returnAll(allocatedSegments);\n throw new MemoryAllocationException();\n }\n allocatedSegments.add(memorySegment);\n }\n\n segments.addAll(allocatedSegments);\n allocatedBytes += (long) allocatedSegments.size() * pageSize;\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-iteration/flink-ml-iteration-common/src/main/java/org/apache/flink/iteration/datacache/nonkeyed/MemorySegmentWriter.java", "repo_name": "apache/flink-ml", "num_chunks": 9, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
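The write(byte[], int, int) loop and ensureCapacity() shown above reduce to two pieces of arithmetic: splitting a byte range across fixed-size pages, and ceiling-dividing a capacity by the page size. The self-contained sketch below isolates both; the class and method names are illustrative, not part of the Flink API.

    import java.util.ArrayList;
    import java.util.List;

    class PagingMath {
        // Splits `len` bytes across pages of `pageSize`, starting at `segmentOffset`,
        // returning {segmentIndex, offsetInSegment, length} triples: the same chunking
        // the write() loop performs against real MemorySegments.
        static List<int[]> splitIntoPages(int pageSize, int segmentOffset, int len) {
            List<int[]> chunks = new ArrayList<>();
            int segmentIndex = 0;
            while (len > 0) {
                int currentLen = Math.min(len, pageSize - segmentOffset);
                chunks.add(new int[] {segmentIndex, segmentOffset, currentLen});
                segmentOffset += currentLen;
                if (segmentOffset >= pageSize) {
                    segmentIndex++;
                    segmentOffset = 0;
                }
                len -= currentLen;
            }
            return chunks;
        }

        // ensureCapacity()'s ternary is a ceiling division over pages.
        static int pagesNeeded(long capacity, int pageSize) {
            return (int) ((capacity + pageSize - 1) / pageSize);
        }
    }

For example, splitIntoPages(4096, 0, 10000) yields chunks of 4096, 4096, and 1808 bytes across three pages, and pagesNeeded(4097, 4096) returns 2.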
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "1130ce71319c9b226a548fa80bf076d1664345883edba132453eb2b62e3759bb"}}, {"doc_uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", "index": 1, "content": "package org.apache.flink.iteration.datacache.nonkeyed;\n\nimport org.apache.flink.annotation.Internal;\nimport org.apache.flink.api.common.typeutils.TypeSerializer;\nimport org.apache.flink.core.fs.Path;\nimport org.apache.flink.core.memory.DataOutputView;\nimport org.apache.flink.core.memory.DataOutputViewStreamWrapper;\nimport org.apache.flink.core.memory.MemorySegment;\nimport org.apache.flink.runtime.memory.MemoryAllocationException;\nimport org.apache.flink.table.runtime.util.MemorySegmentPool;\n\nimport javax.annotation.Nullable;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Optional;\n\n/** A class that writes cache data to memory segments. */\n@Internal\nclass MemorySegmentWriter implements SegmentWriter {\n\n /** The tool to serialize received records into bytes. */\n private final TypeSerializer serializer;\n\n", "meta": {"hash_id": "11609bc62d0d015e290a7a4369d610d81893d45888a7bae5c1d4a6cb80c2a0d4"}}, {"doc_uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", "index": 2, "content": " /** The pre-allocated path to hold cached records into the file system. */\n private final Path path;\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The output stream to write serialized content to memory segments. */\n private final ManagedMemoryOutputStream outputStream;\n\n /** The wrapper view of the output stream to be used with TypeSerializer API. */\n private final DataOutputView outputView;\n\n /** The number of records added so far. 
*/\n private int count;\n\n", "meta": {"hash_id": "b0a1b3ffbc6164099e8a45d2640b616f33f227b9c6bf7d6b4af2af81bf20276d"}}, {"doc_uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", "index": 3, "content": " MemorySegmentWriter(\n TypeSerializer serializer,\n Path path,\n MemorySegmentPool segmentPool,\n long expectedSize)\n throws MemoryAllocationException {\n this.serializer = serializer;\n this.path = path;\n this.segmentPool = segmentPool;\n this.outputStream = new ManagedMemoryOutputStream(segmentPool, expectedSize);\n this.outputView = new DataOutputViewStreamWrapper(outputStream);\n this.count = 0;\n }\n\n @Override\n public boolean addRecord(T record) throws IOException {\n if (outputStream.getPos() >= DataCacheWriter.MAX_SEGMENT_SIZE) {\n return false;\n }\n try {\n serializer.serialize(record, outputView);\n count++;\n return true;\n } catch (RuntimeException e) {\n if (e.getCause() instanceof MemoryAllocationException) {\n return false;\n }\n throw e;\n }\n }\n\n", "meta": {"hash_id": "4e108611ebbb85f7e17e8a2d00d974b7c130c19405819ddb3438c076fc0d91f8"}}, {"doc_uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", "index": 4, "content": " @Override\n public Optional finish() throws IOException {\n if (count > 0) {\n return Optional.of(new Segment(path, count, outputStream.getSegments()));\n } else {\n segmentPool.returnAll(outputStream.getSegments());\n return Optional.empty();\n }\n }\n\n /** An output stream subclass that accepts bytes and writes them to memory segments. */\n private static class ManagedMemoryOutputStream extends OutputStream {\n\n /** The pool to allocate memory segments from. */\n private final MemorySegmentPool segmentPool;\n\n /** The number of bytes in a memory segment. */\n private final int pageSize;\n\n /** The memory segments containing written bytes. */\n private final List segments = new ArrayList<>();\n\n", "meta": {"hash_id": "21039d46dda5e883a86f1a0a4e157c13bb6d5c9656fddd0184830a222b2e667b"}}, {"doc_uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", "index": 5, "content": " /** The index of the segment that currently accepts written bytes. */\n private int segmentIndex;\n\n /** The number of bytes in the current segment that have been written. */\n private int segmentOffset;\n\n /** The number of bytes that have been written so far. */\n private long globalOffset;\n\n /** The number of bytes that have been allocated so far. 
*/\n private long allocatedBytes;\n\n public ManagedMemoryOutputStream(MemorySegmentPool segmentPool, long expectedSize)\n throws MemoryAllocationException {\n this.segmentPool = segmentPool;\n this.pageSize = segmentPool.pageSize();\n ensureCapacity(Math.max(expectedSize, 1L));\n }\n\n", "meta": {"hash_id": "65d3c15d8252b616cd7ea5e83e4a9151f52491630ba000f37fb10bfae103020a"}}, {"doc_uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", "index": 6, "content": " public long getPos() {\n return globalOffset;\n }\n\n public List getSegments() {\n return segments;\n }\n\n @Override\n public void write(int b) throws IOException {\n write(new byte[] {(byte) b}, 0, 1);\n }\n\n @Override\n public void write(@Nullable byte[] b, int off, int len) throws IOException {\n try {\n ensureCapacity(globalOffset + len);\n } catch (MemoryAllocationException e) {\n throw new RuntimeException(e);\n }\n\n", "meta": {"hash_id": "0743c21ee6d59e80a0000ac697d7ea58798f623ca04c85b44153e763fe44075b"}}, {"doc_uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", "index": 7, "content": " while (len > 0) {\n int currentLen = Math.min(len, pageSize - segmentOffset);\n segments.get(segmentIndex).put(segmentOffset, b, off, currentLen);\n segmentOffset += currentLen;\n globalOffset += currentLen;\n if (segmentOffset >= pageSize) {\n segmentIndex++;\n segmentOffset = 0;\n }\n off += currentLen;\n len -= currentLen;\n }\n }\n\n private void ensureCapacity(long capacity) throws MemoryAllocationException {\n if (allocatedBytes >= capacity) {\n return;\n }\n\n int required =\n (int) (capacity % pageSize == 0 ? capacity / pageSize : capacity / pageSize + 1)\n - segments.size();\n\n", "meta": {"hash_id": "8e2bba5f7f2f1778b5f69ddad84019cccf6a982e9c3160ff67b6ebd6f5e8a55d"}}, {"doc_uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", "index": 8, "content": " List allocatedSegments = new ArrayList<>();\n for (int i = 0; i < required; i++) {\n MemorySegment memorySegment = segmentPool.nextSegment();\n if (memorySegment == null) {\n segmentPool.returnAll(allocatedSegments);\n throw new MemoryAllocationException();\n }\n allocatedSegments.add(memorySegment);\n }\n\n segments.addAll(allocatedSegments);\n allocatedBytes += (long) allocatedSegments.size() * pageSize;\n }\n }\n}\n", "meta": {"hash_id": "4b0316b293c586547e1f84719f6da78d79d5712212128c9868757b3914572cc6"}}]}], "golden_chunks": [{"doc_uuid": "7d99db65761e10f4f846eba27371bad7ae1f9c66ece73f42888388a4b2949ea6", "index": 3, "content": " MemorySegmentWriter(\n TypeSerializer serializer,\n Path path,\n MemorySegmentPool segmentPool,\n long expectedSize)\n throws MemoryAllocationException {\n this.serializer = serializer;\n this.path = path;\n this.segmentPool = segmentPool;\n this.outputStream = new ManagedMemoryOutputStream(segmentPool, expectedSize);\n this.outputView = new DataOutputViewStreamWrapper(outputStream);\n this.count = 0;\n }\n\n @Override\n public boolean addRecord(T record) throws IOException {\n if (outputStream.getPos() >= DataCacheWriter.MAX_SEGMENT_SIZE) {\n return false;\n }\n try {\n serializer.serialize(record, outputView);\n count++;\n return true;\n } catch (RuntimeException e) {\n if (e.getCause() instanceof MemoryAllocationException) {\n return false;\n }\n throw e;\n }\n }\n\n", "meta": {"hash_id": "4e108611ebbb85f7e17e8a2d00d974b7c130c19405819ddb3438c076fc0d91f8"}}], "meta": {"doctype": "codebase", "relative_path": 
"/flink-ml-iteration/flink-ml-iteration-common/src/main/java/org/apache/flink/iteration/datacache/nonkeyed/MemorySegmentWriter.java", "repo_name": "apache/flink-ml", "num_chunks": 9, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the HeadOperatorCoordinator class?", "answer": "Based on the test name \"testForwardEvents\" and the comments, the purpose of the HeadOperatorCoordinator seems to be to forward events received from subtasks to other tasks, after aligning the events globally across all parallel subtasks. This can be seen in the test which creates coordinators, sends them SubtaskAlignedEvents, and verifies that the coordinators send out GloballyAlignedEvents in response.", "golden_doc_uuids": ["e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320"], "golden_chunk_uuids": [["e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", 2], ["e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", 1]], "golden_documents": [{"uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.iteration.operator.coordinator;\n\nimport org.apache.flink.iteration.IterationID;\nimport org.apache.flink.iteration.operator.event.GloballyAlignedEvent;\nimport org.apache.flink.iteration.operator.event.SubtaskAlignedEvent;\nimport org.apache.flink.runtime.jobgraph.OperatorID;\nimport org.apache.flink.runtime.operators.coordination.EventReceivingTasks;\nimport org.apache.flink.runtime.operators.coordination.MockOperatorCoordinatorContext;\nimport org.apache.flink.runtime.operators.coordination.OperatorEvent;\nimport org.apache.flink.util.TestLogger;\n\nimport org.junit.Test;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.function.BiFunction;\n\nimport static org.junit.Assert.assertEquals;\n\n/** Tests the behavior of {@link HeadOperatorCoordinator}. 
*/\npublic class HeadOperatorCoordinatorTest extends TestLogger {\n\n @Test(timeout = 60000L)\n public void testForwardEvents() throws Exception {\n IterationID iterationId = new IterationID();\n List<OperatorID> operatorIds = Arrays.asList(new OperatorID(), new OperatorID());\n List<Integer> parallelisms = Arrays.asList(2, 3);\n List<EventReceivingTasks> receivingTasks =\n Arrays.asList(\n EventReceivingTasks.createForRunningTasks(),\n EventReceivingTasks.createForRunningTasks());\n List<HeadOperatorCoordinator> coordinators = new ArrayList<>();\n\n int totalParallelism = parallelisms.stream().mapToInt(i -> i).sum();\n\n for (int i = 0; i < operatorIds.size(); ++i) {\n HeadOperatorCoordinator coordinator =\n createCoordinator(iterationId, parallelisms.get(i), totalParallelism);\n setAllSubtasksReady(coordinator, receivingTasks.get(i), parallelisms.get(i));\n coordinators.add(coordinator);\n }\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(2, j, false)));\n checkSentEvent(1, new GloballyAlignedEvent(2, false), receivingTasks, parallelisms);\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(3, 0, false)));\n checkSentEvent(2, new GloballyAlignedEvent(3, true), receivingTasks, parallelisms);\n }\n\n private HeadOperatorCoordinator createCoordinator(\n IterationID iterationId, int parallelism, int totalHeadParallelism) {\n MockOperatorCoordinatorContext context =\n new MockOperatorCoordinatorContext(new OperatorID(), parallelism);\n return (HeadOperatorCoordinator)\n new HeadOperatorCoordinator.HeadOperatorCoordinatorProvider(\n new OperatorID(), iterationId, totalHeadParallelism)\n .create(context);\n }\n\n private void setAllSubtasksReady(\n HeadOperatorCoordinator coordinator,\n EventReceivingTasks receivingTasks,\n int parallelism) {\n for (int i = 0; i < parallelism; i++) {\n coordinator.executionAttemptReady(i, 0, receivingTasks.createGatewayForSubtask(i, 0));\n }\n }\n\n private void receiveEvent(\n List<HeadOperatorCoordinator> coordinators,\n List<Integer> parallelisms,\n BiFunction<Integer, Integer, List<OperatorEvent>> eventFactory)\n throws Exception {\n for (int i = 0; i < coordinators.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n List<OperatorEvent> events = eventFactory.apply(i, j);\n for (OperatorEvent event : events) {\n coordinators.get(i).handleEventFromOperator(j, 0, event);\n }\n }\n }\n }\n\n private void checkSentEvent(\n int expectedNumEvents,\n GloballyAlignedEvent expectedLastEvent,\n List<EventReceivingTasks> receivingTasks,\n List<Integer> parallelisms)\n throws InterruptedException {\n for (int i = 0; i < parallelisms.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n while (true) {\n List<OperatorEvent> events = receivingTasks.get(i).getSentEventsForSubtask(j);\n if (events.size() < expectedNumEvents) {\n Thread.sleep(50);\n continue;\n }\n\n assertEquals(expectedLastEvent, events.get(events.size() - 1));\n break;\n }\n }\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-iteration/flink-ml-iteration-common/src/test/java/org/apache/flink/iteration/operator/coordinator/HeadOperatorCoordinatorTest.java", "repo_name": "apache/flink-ml", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. 
The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "9b9a8e3902f066ec72ab2210eabb6321938ff586eb4f81aa28c4f8b1e5017e95"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 1, "content": "package org.apache.flink.iteration.operator.coordinator;\n\nimport org.apache.flink.iteration.IterationID;\nimport org.apache.flink.iteration.operator.event.GloballyAlignedEvent;\nimport org.apache.flink.iteration.operator.event.SubtaskAlignedEvent;\nimport org.apache.flink.runtime.jobgraph.OperatorID;\nimport org.apache.flink.runtime.operators.coordination.EventReceivingTasks;\nimport org.apache.flink.runtime.operators.coordination.MockOperatorCoordinatorContext;\nimport org.apache.flink.runtime.operators.coordination.OperatorEvent;\nimport org.apache.flink.util.TestLogger;\n\nimport org.junit.Test;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.function.BiFunction;\n\nimport static org.junit.Assert.assertEquals;\n\n/** Tests the behavior of {@link HeadOperatorCoordinator}. */\npublic class HeadOperatorCoordinatorTest extends TestLogger {\n\n", "meta": {"hash_id": "f789e275a30429df1b19dfd2eff39242f74f6b82d1848832f59a96b627cef83a"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 2, "content": " @Test(timeout = 60000L)\n public void testForwardEvents() throws Exception {\n IterationID iterationId = new IterationID();\n List<OperatorID> operatorIds = Arrays.asList(new OperatorID(), new OperatorID());\n List<Integer> parallelisms = Arrays.asList(2, 3);\n List<EventReceivingTasks> receivingTasks =\n Arrays.asList(\n EventReceivingTasks.createForRunningTasks(),\n EventReceivingTasks.createForRunningTasks());\n List<HeadOperatorCoordinator> coordinators = new ArrayList<>();\n\n", "meta": {"hash_id": "492cc26920ed511a9d849583b8711ea0680d8efb0cf1693258a52b44a3784c11"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 3, "content": " int totalParallelism = parallelisms.stream().mapToInt(i -> i).sum();\n\n for (int i = 0; i < operatorIds.size(); ++i) {\n HeadOperatorCoordinator coordinator =\n createCoordinator(iterationId, parallelisms.get(i), totalParallelism);\n setAllSubtasksReady(coordinator, receivingTasks.get(i), parallelisms.get(i));\n coordinators.add(coordinator);\n }\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(2, j, false)));\n checkSentEvent(1, new GloballyAlignedEvent(2, false), receivingTasks, parallelisms);\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(3, 0, false)));\n checkSentEvent(2, new GloballyAlignedEvent(3, true), receivingTasks, parallelisms);\n }\n\n", "meta": {"hash_id": "c9a83f3129471de0327e4e42ffc9ac927aff5d6e38a8cb92addddedaf06fe3c7"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 4, "content": " private 
HeadOperatorCoordinator createCoordinator(\n IterationID iterationId, int parallelism, int totalHeadParallelism) {\n MockOperatorCoordinatorContext context =\n new MockOperatorCoordinatorContext(new OperatorID(), parallelism);\n return (HeadOperatorCoordinator)\n new HeadOperatorCoordinator.HeadOperatorCoordinatorProvider(\n new OperatorID(), iterationId, totalHeadParallelism)\n .create(context);\n }\n\n private void setAllSubtasksReady(\n HeadOperatorCoordinator coordinator,\n EventReceivingTasks receivingTasks,\n int parallelism) {\n for (int i = 0; i < parallelism; i++) {\n coordinator.executionAttemptReady(i, 0, receivingTasks.createGatewayForSubtask(i, 0));\n }\n }\n\n", "meta": {"hash_id": "a41f3136aa477fee3cdecd404e2796c0c9c652d17a4dd704dc6cbce72d5a3ecf"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 5, "content": " private void receiveEvent(\n List<HeadOperatorCoordinator> coordinators,\n List<Integer> parallelisms,\n BiFunction<Integer, Integer, List<OperatorEvent>> eventFactory)\n throws Exception {\n for (int i = 0; i < coordinators.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n List<OperatorEvent> events = eventFactory.apply(i, j);\n for (OperatorEvent event : events) {\n coordinators.get(i).handleEventFromOperator(j, 0, event);\n }\n }\n }\n }\n\n", "meta": {"hash_id": "4fdf52ba188780f85b0ad42cf5d6a857926826cbd4b7447a4e9261014d784802"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 6, "content": " private void checkSentEvent(\n int expectedNumEvents,\n GloballyAlignedEvent expectedLastEvent,\n List<EventReceivingTasks> receivingTasks,\n List<Integer> parallelisms)\n throws InterruptedException {\n for (int i = 0; i < parallelisms.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n while (true) {\n List<OperatorEvent> events = receivingTasks.get(i).getSentEventsForSubtask(j);\n if (events.size() < expectedNumEvents) {\n Thread.sleep(50);\n continue;\n }\n\n assertEquals(expectedLastEvent, events.get(events.size() - 1));\n break;\n }\n }\n }\n }\n}\n", "meta": {"hash_id": "66e55d0a1dfd4fe43d49fdffd2262f155e19b4aaf4c49e48c18b6c0925a65218"}}]}], "golden_chunks": [{"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 2, "content": " @Test(timeout = 60000L)\n public void testForwardEvents() throws Exception {\n IterationID iterationId = new IterationID();\n List<OperatorID> operatorIds = Arrays.asList(new OperatorID(), new OperatorID());\n List<Integer> parallelisms = Arrays.asList(2, 3);\n List<EventReceivingTasks> receivingTasks =\n Arrays.asList(\n EventReceivingTasks.createForRunningTasks(),\n EventReceivingTasks.createForRunningTasks());\n List<HeadOperatorCoordinator> coordinators = new ArrayList<>();\n\n", "meta": {"hash_id": "492cc26920ed511a9d849583b8711ea0680d8efb0cf1693258a52b44a3784c11"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 1, "content": "package org.apache.flink.iteration.operator.coordinator;\n\nimport org.apache.flink.iteration.IterationID;\nimport org.apache.flink.iteration.operator.event.GloballyAlignedEvent;\nimport org.apache.flink.iteration.operator.event.SubtaskAlignedEvent;\nimport org.apache.flink.runtime.jobgraph.OperatorID;\nimport org.apache.flink.runtime.operators.coordination.EventReceivingTasks;\nimport org.apache.flink.runtime.operators.coordination.MockOperatorCoordinatorContext;\nimport org.apache.flink.runtime.operators.coordination.OperatorEvent;\nimport org.apache.flink.util.TestLogger;\n\nimport org.junit.Test;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport 
java.util.function.BiFunction;\n\nimport static org.junit.Assert.assertEquals;\n\n/** Tests the behavior of {@link HeadOperatorCoordinator}. */\npublic class HeadOperatorCoordinatorTest extends TestLogger {\n\n", "meta": {"hash_id": "f789e275a30429df1b19dfd2eff39242f74f6b82d1848832f59a96b627cef83a"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-iteration/flink-ml-iteration-common/src/test/java/org/apache/flink/iteration/operator/coordinator/HeadOperatorCoordinatorTest.java", "repo_name": "apache/flink-ml", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "How does the HeadOperatorCoordinator determine when to send out a GloballyAlignedEvent?", "answer": "The test shows that the HeadOperatorCoordinator sends a GloballyAlignedEvent after it has received a SubtaskAlignedEvent from every parallel subtask. Specifically, it first sends a SubtaskAlignedEvent with epoch 2 from every subtask of each coordinator. After this, the test verifies a single GloballyAlignedEvent with epoch 2 was sent out. It then sends a SubtaskAlignedEvent with epoch 3 and zero aligned records from every subtask, and verifies a GloballyAlignedEvent with epoch 3 is sent out, but with its termination flag set to true, likely because no records were reported in epoch 3.", "golden_doc_uuids": ["e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320"], "golden_chunk_uuids": [["e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", 3]], "golden_documents": [{"uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.iteration.operator.coordinator;\n\nimport org.apache.flink.iteration.IterationID;\nimport org.apache.flink.iteration.operator.event.GloballyAlignedEvent;\nimport org.apache.flink.iteration.operator.event.SubtaskAlignedEvent;\nimport org.apache.flink.runtime.jobgraph.OperatorID;\nimport org.apache.flink.runtime.operators.coordination.EventReceivingTasks;\nimport org.apache.flink.runtime.operators.coordination.MockOperatorCoordinatorContext;\nimport org.apache.flink.runtime.operators.coordination.OperatorEvent;\nimport org.apache.flink.util.TestLogger;\n\nimport org.junit.Test;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.function.BiFunction;\n\nimport static org.junit.Assert.assertEquals;\n\n/** Tests the behavior of {@link HeadOperatorCoordinator}. 
*/\npublic class HeadOperatorCoordinatorTest extends TestLogger {\n\n @Test(timeout = 60000L)\n public void testForwardEvents() throws Exception {\n IterationID iterationId = new IterationID();\n List<OperatorID> operatorIds = Arrays.asList(new OperatorID(), new OperatorID());\n List<Integer> parallelisms = Arrays.asList(2, 3);\n List<EventReceivingTasks> receivingTasks =\n Arrays.asList(\n EventReceivingTasks.createForRunningTasks(),\n EventReceivingTasks.createForRunningTasks());\n List<HeadOperatorCoordinator> coordinators = new ArrayList<>();\n\n int totalParallelism = parallelisms.stream().mapToInt(i -> i).sum();\n\n for (int i = 0; i < operatorIds.size(); ++i) {\n HeadOperatorCoordinator coordinator =\n createCoordinator(iterationId, parallelisms.get(i), totalParallelism);\n setAllSubtasksReady(coordinator, receivingTasks.get(i), parallelisms.get(i));\n coordinators.add(coordinator);\n }\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(2, j, false)));\n checkSentEvent(1, new GloballyAlignedEvent(2, false), receivingTasks, parallelisms);\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(3, 0, false)));\n checkSentEvent(2, new GloballyAlignedEvent(3, true), receivingTasks, parallelisms);\n }\n\n private HeadOperatorCoordinator createCoordinator(\n IterationID iterationId, int parallelism, int totalHeadParallelism) {\n MockOperatorCoordinatorContext context =\n new MockOperatorCoordinatorContext(new OperatorID(), parallelism);\n return (HeadOperatorCoordinator)\n new HeadOperatorCoordinator.HeadOperatorCoordinatorProvider(\n new OperatorID(), iterationId, totalHeadParallelism)\n .create(context);\n }\n\n private void setAllSubtasksReady(\n HeadOperatorCoordinator coordinator,\n EventReceivingTasks receivingTasks,\n int parallelism) {\n for (int i = 0; i < parallelism; i++) {\n coordinator.executionAttemptReady(i, 0, receivingTasks.createGatewayForSubtask(i, 0));\n }\n }\n\n private void receiveEvent(\n List<HeadOperatorCoordinator> coordinators,\n List<Integer> parallelisms,\n BiFunction<Integer, Integer, List<OperatorEvent>> eventFactory)\n throws Exception {\n for (int i = 0; i < coordinators.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n List<OperatorEvent> events = eventFactory.apply(i, j);\n for (OperatorEvent event : events) {\n coordinators.get(i).handleEventFromOperator(j, 0, event);\n }\n }\n }\n }\n\n private void checkSentEvent(\n int expectedNumEvents,\n GloballyAlignedEvent expectedLastEvent,\n List<EventReceivingTasks> receivingTasks,\n List<Integer> parallelisms)\n throws InterruptedException {\n for (int i = 0; i < parallelisms.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n while (true) {\n List<OperatorEvent> events = receivingTasks.get(i).getSentEventsForSubtask(j);\n if (events.size() < expectedNumEvents) {\n Thread.sleep(50);\n continue;\n }\n\n assertEquals(expectedLastEvent, events.get(events.size() - 1));\n break;\n }\n }\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-iteration/flink-ml-iteration-common/src/test/java/org/apache/flink/iteration/operator/coordinator/HeadOperatorCoordinatorTest.java", "repo_name": "apache/flink-ml", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. 
The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "9b9a8e3902f066ec72ab2210eabb6321938ff586eb4f81aa28c4f8b1e5017e95"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 1, "content": "package org.apache.flink.iteration.operator.coordinator;\n\nimport org.apache.flink.iteration.IterationID;\nimport org.apache.flink.iteration.operator.event.GloballyAlignedEvent;\nimport org.apache.flink.iteration.operator.event.SubtaskAlignedEvent;\nimport org.apache.flink.runtime.jobgraph.OperatorID;\nimport org.apache.flink.runtime.operators.coordination.EventReceivingTasks;\nimport org.apache.flink.runtime.operators.coordination.MockOperatorCoordinatorContext;\nimport org.apache.flink.runtime.operators.coordination.OperatorEvent;\nimport org.apache.flink.util.TestLogger;\n\nimport org.junit.Test;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.function.BiFunction;\n\nimport static org.junit.Assert.assertEquals;\n\n/** Tests the behavior of {@link HeadOperatorCoordinator}. */\npublic class HeadOperatorCoordinatorTest extends TestLogger {\n\n", "meta": {"hash_id": "f789e275a30429df1b19dfd2eff39242f74f6b82d1848832f59a96b627cef83a"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 2, "content": " @Test(timeout = 60000L)\n public void testForwardEvents() throws Exception {\n IterationID iterationId = new IterationID();\n List<OperatorID> operatorIds = Arrays.asList(new OperatorID(), new OperatorID());\n List<Integer> parallelisms = Arrays.asList(2, 3);\n List<EventReceivingTasks> receivingTasks =\n Arrays.asList(\n EventReceivingTasks.createForRunningTasks(),\n EventReceivingTasks.createForRunningTasks());\n List<HeadOperatorCoordinator> coordinators = new ArrayList<>();\n\n", "meta": {"hash_id": "492cc26920ed511a9d849583b8711ea0680d8efb0cf1693258a52b44a3784c11"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 3, "content": " int totalParallelism = parallelisms.stream().mapToInt(i -> i).sum();\n\n for (int i = 0; i < operatorIds.size(); ++i) {\n HeadOperatorCoordinator coordinator =\n createCoordinator(iterationId, parallelisms.get(i), totalParallelism);\n setAllSubtasksReady(coordinator, receivingTasks.get(i), parallelisms.get(i));\n coordinators.add(coordinator);\n }\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(2, j, false)));\n checkSentEvent(1, new GloballyAlignedEvent(2, false), receivingTasks, parallelisms);\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(3, 0, false)));\n checkSentEvent(2, new GloballyAlignedEvent(3, true), receivingTasks, parallelisms);\n }\n\n", "meta": {"hash_id": "c9a83f3129471de0327e4e42ffc9ac927aff5d6e38a8cb92addddedaf06fe3c7"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 4, "content": " private 
HeadOperatorCoordinator createCoordinator(\n IterationID iterationId, int parallelism, int totalHeadParallelism) {\n MockOperatorCoordinatorContext context =\n new MockOperatorCoordinatorContext(new OperatorID(), parallelism);\n return (HeadOperatorCoordinator)\n new HeadOperatorCoordinator.HeadOperatorCoordinatorProvider(\n new OperatorID(), iterationId, totalHeadParallelism)\n .create(context);\n }\n\n private void setAllSubtasksReady(\n HeadOperatorCoordinator coordinator,\n EventReceivingTasks receivingTasks,\n int parallelism) {\n for (int i = 0; i < parallelism; i++) {\n coordinator.executionAttemptReady(i, 0, receivingTasks.createGatewayForSubtask(i, 0));\n }\n }\n\n", "meta": {"hash_id": "a41f3136aa477fee3cdecd404e2796c0c9c652d17a4dd704dc6cbce72d5a3ecf"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 5, "content": " private void receiveEvent(\n List<HeadOperatorCoordinator> coordinators,\n List<Integer> parallelisms,\n BiFunction<Integer, Integer, List<OperatorEvent>> eventFactory)\n throws Exception {\n for (int i = 0; i < coordinators.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n List<OperatorEvent> events = eventFactory.apply(i, j);\n for (OperatorEvent event : events) {\n coordinators.get(i).handleEventFromOperator(j, 0, event);\n }\n }\n }\n }\n\n", "meta": {"hash_id": "4fdf52ba188780f85b0ad42cf5d6a857926826cbd4b7447a4e9261014d784802"}}, {"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 6, "content": " private void checkSentEvent(\n int expectedNumEvents,\n GloballyAlignedEvent expectedLastEvent,\n List<EventReceivingTasks> receivingTasks,\n List<Integer> parallelisms)\n throws InterruptedException {\n for (int i = 0; i < parallelisms.size(); ++i) {\n for (int j = 0; j < parallelisms.get(i); ++j) {\n while (true) {\n List<OperatorEvent> events = receivingTasks.get(i).getSentEventsForSubtask(j);\n if (events.size() < expectedNumEvents) {\n Thread.sleep(50);\n continue;\n }\n\n assertEquals(expectedLastEvent, events.get(events.size() - 1));\n break;\n }\n }\n }\n }\n}\n", "meta": {"hash_id": "66e55d0a1dfd4fe43d49fdffd2262f155e19b4aaf4c49e48c18b6c0925a65218"}}]}], "golden_chunks": [{"doc_uuid": "e76b84e8438dda17b4ce9cea1c2966171c7bf77397a4036cede90ee5de345320", "index": 3, "content": " int totalParallelism = parallelisms.stream().mapToInt(i -> i).sum();\n\n for (int i = 0; i < operatorIds.size(); ++i) {\n HeadOperatorCoordinator coordinator =\n createCoordinator(iterationId, parallelisms.get(i), totalParallelism);\n setAllSubtasksReady(coordinator, receivingTasks.get(i), parallelisms.get(i));\n coordinators.add(coordinator);\n }\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(2, j, false)));\n checkSentEvent(1, new GloballyAlignedEvent(2, false), receivingTasks, parallelisms);\n\n receiveEvent(\n coordinators,\n parallelisms,\n (i, j) -> Collections.singletonList(new SubtaskAlignedEvent(3, 0, false)));\n checkSentEvent(2, new GloballyAlignedEvent(3, true), receivingTasks, parallelisms);\n }\n\n", "meta": {"hash_id": "c9a83f3129471de0327e4e42ffc9ac927aff5d6e38a8cb92addddedaf06fe3c7"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-iteration/flink-ml-iteration-common/src/test/java/org/apache/flink/iteration/operator/coordinator/HeadOperatorCoordinatorTest.java", "repo_name": "apache/flink-ml", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "How is the input data generated in this example?", "answer": "The input data is generated using the following code:\n\nDataStream<Row> inputStream =\n env.fromElements(\n Row.of((Object) 
new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\nTable inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\nIt creates a DataStream of Rows, each containing an array of Strings. The DataStream is then converted into a Table named \"inputTable\" with a single column \"input\".", "golden_doc_uuids": ["b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e"], "golden_chunk_uuids": [["b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", 2]], "golden_documents": [{"uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. 
*/\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n // Generates input data.\n DataStream<Row> inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n // Creates a StopWordsRemover object and initializes its parameters.\n StopWordsRemover remover =\n new StopWordsRemover().setInputCols(\"input\").setOutputCols(\"output\");\n\n // Uses the StopWordsRemover object for feature transformations.\n Table outputTable = remover.transform(inputTable)[0];\n\n // Extracts and displays the results.\n for (CloseableIterator<Row> it = outputTable.execute().collect(); it.hasNext(); ) {\n Row row = it.next();\n\n String[] inputValues = row.getFieldAs(\"input\");\n String[] outputValues = row.getFieldAs(\"output\");\n\n System.out.printf(\n \"Input Values: %s\\tOutput Values: %s\\n\",\n Arrays.toString(inputValues), Arrays.toString(outputValues));\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-examples/src/main/java/org/apache/flink/ml/examples/feature/StopWordsRemoverExample.java", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "9b29ee32fadcc73ea0cd935f0ee2aba1db0962b8fcfff68efe6be121fdbc7ff0"}}, {"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 1, "content": "package org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. 
*/\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n", "meta": {"hash_id": "8deeb202577f9a67382c29cef5d15b47932110bdd98264ec4287eb9edef7cb2c"}}, {"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 2, "content": " // Generates input data.\n DataStream<Row> inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n", "meta": {"hash_id": "2b5941d9fc3330accd6393c38c2fdbd8c21692389afc597edcd59a030985a214"}}, {"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 3, "content": " // Creates a StopWordsRemover object and initializes its parameters.\n StopWordsRemover remover =\n new StopWordsRemover().setInputCols(\"input\").setOutputCols(\"output\");\n\n // Uses the StopWordsRemover object for feature transformations.\n Table outputTable = remover.transform(inputTable)[0];\n\n // Extracts and displays the results.\n for (CloseableIterator<Row> it = outputTable.execute().collect(); it.hasNext(); ) {\n Row row = it.next();\n\n String[] inputValues = row.getFieldAs(\"input\");\n String[] outputValues = row.getFieldAs(\"output\");\n\n System.out.printf(\n \"Input Values: %s\\tOutput Values: %s\\n\",\n Arrays.toString(inputValues), Arrays.toString(outputValues));\n }\n }\n}\n", "meta": {"hash_id": "de7f851aff45fbfc21cb328d4e865323843fee64ad145099ed2c182bc2dbee03"}}]}], "golden_chunks": [{"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 2, "content": " // Generates input data.\n DataStream<Row> inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n", "meta": {"hash_id": "2b5941d9fc3330accd6393c38c2fdbd8c21692389afc597edcd59a030985a214"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-examples/src/main/java/org/apache/flink/ml/examples/feature/StopWordsRemoverExample.java", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "How are the results extracted and displayed in this example?", "answer": "The results are extracted and displayed using this code:\n\nfor (CloseableIterator<Row> it = outputTable.execute().collect(); it.hasNext(); ) {\n Row row = it.next();\n\n String[] inputValues = row.getFieldAs(\"input\");\n String[] outputValues = row.getFieldAs(\"output\");\n\n System.out.printf(\n \"Input Values: %s\\tOutput Values: %s\\n\",\n Arrays.toString(inputValues), Arrays.toString(outputValues));\n }\n\nIt iterates over each Row in the \"outputTable\", extracting the \"input\" and \"output\" column values as String arrays. 
It then prints out the input and output values for each row.", "golden_doc_uuids": ["b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e"], "golden_chunk_uuids": [["b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", 2], ["b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", 1], ["b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", 0]], "golden_documents": [{"uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. 
*/\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n // Generates input data.\n DataStream<Row> inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n // Creates a StopWordsRemover object and initializes its parameters.\n StopWordsRemover remover =\n new StopWordsRemover().setInputCols(\"input\").setOutputCols(\"output\");\n\n // Uses the StopWordsRemover object for feature transformations.\n Table outputTable = remover.transform(inputTable)[0];\n\n // Extracts and displays the results.\n for (CloseableIterator<Row> it = outputTable.execute().collect(); it.hasNext(); ) {\n Row row = it.next();\n\n String[] inputValues = row.getFieldAs(\"input\");\n String[] outputValues = row.getFieldAs(\"output\");\n\n System.out.printf(\n \"Input Values: %s\\tOutput Values: %s\\n\",\n Arrays.toString(inputValues), Arrays.toString(outputValues));\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-examples/src/main/java/org/apache/flink/ml/examples/feature/StopWordsRemoverExample.java", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "9b29ee32fadcc73ea0cd935f0ee2aba1db0962b8fcfff68efe6be121fdbc7ff0"}}, {"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 1, "content": "package org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. 
*/\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n", "meta": {"hash_id": "8deeb202577f9a67382c29cef5d15b47932110bdd98264ec4287eb9edef7cb2c"}}, {"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 2, "content": " // Generates input data.\n DataStream<Row> inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n", "meta": {"hash_id": "2b5941d9fc3330accd6393c38c2fdbd8c21692389afc597edcd59a030985a214"}}, {"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 3, "content": " // Creates a StopWordsRemover object and initializes its parameters.\n StopWordsRemover remover =\n new StopWordsRemover().setInputCols(\"input\").setOutputCols(\"output\");\n\n // Uses the StopWordsRemover object for feature transformations.\n Table outputTable = remover.transform(inputTable)[0];\n\n // Extracts and displays the results.\n for (CloseableIterator<Row> it = outputTable.execute().collect(); it.hasNext(); ) {\n Row row = it.next();\n\n String[] inputValues = row.getFieldAs(\"input\");\n String[] outputValues = row.getFieldAs(\"output\");\n\n System.out.printf(\n \"Input Values: %s\\tOutput Values: %s\\n\",\n Arrays.toString(inputValues), Arrays.toString(outputValues));\n }\n }\n}\n", "meta": {"hash_id": "de7f851aff45fbfc21cb328d4e865323843fee64ad145099ed2c182bc2dbee03"}}]}], "golden_chunks": [{"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 2, "content": " // Generates input data.\n DataStream<Row> inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n", "meta": {"hash_id": "2b5941d9fc3330accd6393c38c2fdbd8c21692389afc597edcd59a030985a214"}}, {"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 1, "content": "package org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. 
*/\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n", "meta": {"hash_id": "8deeb202577f9a67382c29cef5d15b47932110bdd98264ec4287eb9edef7cb2c"}}, {"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "9b29ee32fadcc73ea0cd935f0ee2aba1db0962b8fcfff68efe6be121fdbc7ff0"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-examples/src/main/java/org/apache/flink/ml/examples/feature/StopWordsRemoverExample.java", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "What execution environment is used in this example?", "answer": "This example uses the StreamExecutionEnvironment, as seen in this line:\n\nStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n\nIt gets the execution environment for running the stream processing job.", "golden_doc_uuids": ["b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e"], "golden_chunk_uuids": [["b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", 1]], "golden_documents": [{"uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. */\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n // Generates input data.\n DataStream<Row> inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n // Creates a StopWordsRemover object and initializes its parameters.\n StopWordsRemover remover =\n new StopWordsRemover().setInputCols(\"input\").setOutputCols(\"output\");\n\n // Uses the StopWordsRemover object for feature transformations.\n Table outputTable = remover.transform(inputTable)[0];\n\n // Extracts and displays the results.\n for (CloseableIterator<Row> it = outputTable.execute().collect(); it.hasNext(); ) {\n Row row = it.next();\n\n String[] inputValues = row.getFieldAs(\"input\");\n String[] outputValues = row.getFieldAs(\"output\");\n\n System.out.printf(\n \"Input Values: %s\\tOutput Values: %s\\n\",\n Arrays.toString(inputValues), Arrays.toString(outputValues));\n }\n }\n}\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-examples/src/main/java/org/apache/flink/ml/examples/feature/StopWordsRemoverExample.java", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "9b29ee32fadcc73ea0cd935f0ee2aba1db0962b8fcfff68efe6be121fdbc7ff0"}}, {"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 1, "content": "package org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. */\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n", "meta": {"hash_id": "8deeb202577f9a67382c29cef5d15b47932110bdd98264ec4287eb9edef7cb2c"}}, {"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 2, "content": " // Generates input data.\n DataStream<Row> inputStream =\n env.fromElements(\n Row.of((Object) new String[] {\"test\", \"test\"}),\n Row.of((Object) new String[] {\"a\", \"b\", \"c\", \"d\"}),\n Row.of((Object) new String[] {\"a\", \"the\", \"an\"}),\n Row.of((Object) new String[] {\"A\", \"The\", \"AN\"}),\n Row.of((Object) new String[] {null}),\n Row.of((Object) new String[] {}));\n Table inputTable = tEnv.fromDataStream(inputStream).as(\"input\");\n\n", "meta": {"hash_id": "2b5941d9fc3330accd6393c38c2fdbd8c21692389afc597edcd59a030985a214"}}, {"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 3, "content": " // Creates a StopWordsRemover object and initializes its parameters.\n StopWordsRemover remover =\n new StopWordsRemover().setInputCols(\"input\").setOutputCols(\"output\");\n\n // Uses the StopWordsRemover object for feature transformations.\n Table outputTable = remover.transform(inputTable)[0];\n\n // Extracts and displays the results.\n for (CloseableIterator<Row> it = outputTable.execute().collect(); it.hasNext(); ) {\n Row row = it.next();\n\n String[] inputValues = row.getFieldAs(\"input\");\n String[] outputValues = row.getFieldAs(\"output\");\n\n System.out.printf(\n \"Input Values: %s\\tOutput Values: %s\\n\",\n Arrays.toString(inputValues), Arrays.toString(outputValues));\n }\n }\n}\n", "meta": {"hash_id": "de7f851aff45fbfc21cb328d4e865323843fee64ad145099ed2c182bc2dbee03"}}]}], "golden_chunks": [{"doc_uuid": "b5a7a2b677027616a70919913ddc49d329066ba2aa5c266fd47239c36afe5c1e", "index": 1, "content": "package org.apache.flink.ml.examples.feature;\n\nimport org.apache.flink.ml.feature.stopwordsremover.StopWordsRemover;\nimport org.apache.flink.streaming.api.datastream.DataStream;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.table.api.Table;\nimport 
org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.types.Row;\nimport org.apache.flink.util.CloseableIterator;\n\nimport java.util.Arrays;\n\n/** Simple program that creates a StopWordsRemover instance and uses it for feature engineering. */\npublic class StopWordsRemoverExample {\n public static void main(String[] args) {\n StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();\n StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);\n\n", "meta": {"hash_id": "8deeb202577f9a67382c29cef5d15b47932110bdd98264ec4287eb9edef7cb2c"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-examples/src/main/java/org/apache/flink/ml/examples/feature/StopWordsRemoverExample.java", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "How do you create an IndexToStringModel instance?", "answer": "To create an IndexToStringModel instance, you can use the constructor IndexToStringModel(). For example:\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n", "golden_doc_uuids": ["85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107"], "golden_chunk_uuids": [["85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", 2]], "golden_documents": [{"uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n# Simple program that creates an IndexToStringModel instance and uses it\n# for feature engineering.\n\nfrom pyflink.common import Types\nfrom pyflink.datastream import StreamExecutionEnvironment\nfrom pyflink.ml.feature.stringindexer import IndexToStringModel\nfrom pyflink.table import StreamTableEnvironment\n\n# create a new StreamExecutionEnvironment\nenv = StreamExecutionEnvironment.get_execution_environment()\n\n# create a StreamTableEnvironment\nt_env = StreamTableEnvironment.create(env)\n\n# generate input data\npredict_table = t_env.from_data_stream(\n env.from_collection([\n (0, 3),\n (1, 2),\n ],\n type_info=Types.ROW_NAMED(\n ['input_col1', 'input_col2'],\n [Types.INT(), Types.INT()])\n ))\n\n# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n# extract and display the results\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = result[field_names.index(model.get_input_cols()[i])]\n output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/examples/ml/feature/indextostringmodel_example.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 0, "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n", "meta": {"hash_id": "f07555e7f0a67f3edc14594d9ed305a844c369507858c75d620aa8f81765aa6f"}}, {"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 1, "content": "# Simple program that creates an IndexToStringModel instance and uses it\n# for feature engineering.\n\nfrom pyflink.common import Types\nfrom pyflink.datastream import StreamExecutionEnvironment\nfrom pyflink.ml.feature.stringindexer import IndexToStringModel\nfrom pyflink.table import StreamTableEnvironment\n\n# create a new StreamExecutionEnvironment\nenv = StreamExecutionEnvironment.get_execution_environment()\n\n# create a StreamTableEnvironment\nt_env = StreamTableEnvironment.create(env)\n\n# generate input data\npredict_table = t_env.from_data_stream(\n env.from_collection([\n (0, 3),\n (1, 2),\n ],\n type_info=Types.ROW_NAMED(\n ['input_col1', 'input_col2'],\n [Types.INT(), Types.INT()])\n ))\n\n", "meta": {"hash_id": "f0704482e80c29296b1c1dbb100ebbc5d3c0d981889415ff47ea549f136d8bdf"}}, {"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 2, "content": "# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n", "meta": {"hash_id": "27ec9aff5777ae5e04149e8f907edd15ac163163ffbe990e2ee7611007c611f1"}}, {"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 3, "content": "# extract and display the results\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = result[field_names.index(model.get_input_cols()[i])]\n output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n", "meta": {"hash_id": "95086dd8a786df6e85e202538db071ae7c00517370b956af1006ff423132a0fe"}}]}], "golden_chunks": [{"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 2, "content": "# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 
'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n", "meta": {"hash_id": "27ec9aff5777ae5e04149e8f907edd15ac163163ffbe990e2ee7611007c611f1"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/examples/ml/feature/indextostringmodel_example.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "How do you set the model data for the IndexToStringModel?", "answer": "You can use the set_model_data() method of the IndexToStringModel to set the model data. The model data should be provided as a table. For example:\n\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_model_data(model_data_table)\n", "golden_doc_uuids": ["85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107"], "golden_chunk_uuids": [["85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", 2]], "golden_documents": [{"uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n# Simple program that creates an IndexToStringModel instance and uses it\n# for feature engineering.\n\nfrom pyflink.common import Types\nfrom pyflink.datastream import StreamExecutionEnvironment\nfrom pyflink.ml.feature.stringindexer import IndexToStringModel\nfrom pyflink.table import StreamTableEnvironment\n\n# create a new StreamExecutionEnvironment\nenv = StreamExecutionEnvironment.get_execution_environment()\n\n# create a StreamTableEnvironment\nt_env = StreamTableEnvironment.create(env)\n\n# generate input data\npredict_table = t_env.from_data_stream(\n env.from_collection([\n (0, 3),\n (1, 2),\n ],\n type_info=Types.ROW_NAMED(\n ['input_col1', 'input_col2'],\n [Types.INT(), Types.INT()])\n ))\n\n# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n# extract and display the results\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = result[field_names.index(model.get_input_cols()[i])]\n output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/examples/ml/feature/indextostringmodel_example.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 0, "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n", "meta": {"hash_id": "f07555e7f0a67f3edc14594d9ed305a844c369507858c75d620aa8f81765aa6f"}}, {"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 1, "content": "# Simple program that creates an IndexToStringModel instance and uses it\n# for feature engineering.\n\nfrom pyflink.common import Types\nfrom pyflink.datastream import StreamExecutionEnvironment\nfrom pyflink.ml.feature.stringindexer import IndexToStringModel\nfrom pyflink.table import StreamTableEnvironment\n\n# create a new StreamExecutionEnvironment\nenv = StreamExecutionEnvironment.get_execution_environment()\n\n# create a StreamTableEnvironment\nt_env = StreamTableEnvironment.create(env)\n\n# generate input data\npredict_table = t_env.from_data_stream(\n env.from_collection([\n (0, 3),\n (1, 2),\n ],\n type_info=Types.ROW_NAMED(\n ['input_col1', 'input_col2'],\n [Types.INT(), Types.INT()])\n ))\n\n", "meta": {"hash_id": "f0704482e80c29296b1c1dbb100ebbc5d3c0d981889415ff47ea549f136d8bdf"}}, {"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 2, "content": "# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n", "meta": {"hash_id": "27ec9aff5777ae5e04149e8f907edd15ac163163ffbe990e2ee7611007c611f1"}}, {"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 3, "content": "# extract and display the results\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = result[field_names.index(model.get_input_cols()[i])]\n output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n", "meta": {"hash_id": "95086dd8a786df6e85e202538db071ae7c00517370b956af1006ff423132a0fe"}}]}], "golden_chunks": [{"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 2, "content": "# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 
'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n", "meta": {"hash_id": "27ec9aff5777ae5e04149e8f907edd15ac163163ffbe990e2ee7611007c611f1"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/examples/ml/feature/indextostringmodel_example.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "How do you extract and display the results after applying the IndexToStringModel?", "answer": "To extract and display the results after applying the IndexToStringModel, you can:\n1. Get the field names of the output table using output.get_schema().get_field_names().\n2. Create lists to store the input and output values.\n3. Iterate over the results using t_env.to_data_stream(output).execute_and_collect().\n4. For each result, extract the input and output values using the field names and index.\n5. Print the input and output values.\n\nFor example:\n\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = result[field_names.index(model.get_input_cols()[i])]\n output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n", "golden_doc_uuids": ["85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107"], "golden_chunk_uuids": [["85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", 3]], "golden_documents": [{"uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n# Simple program that creates an IndexToStringModel instance and uses it\n# for feature engineering.\n\nfrom pyflink.common import Types\nfrom pyflink.datastream import StreamExecutionEnvironment\nfrom pyflink.ml.feature.stringindexer import IndexToStringModel\nfrom pyflink.table import StreamTableEnvironment\n\n# create a new StreamExecutionEnvironment\nenv = StreamExecutionEnvironment.get_execution_environment()\n\n# create a StreamTableEnvironment\nt_env = StreamTableEnvironment.create(env)\n\n# generate input data\npredict_table = t_env.from_data_stream(\n env.from_collection([\n (0, 3),\n (1, 2),\n ],\n type_info=Types.ROW_NAMED(\n ['input_col1', 'input_col2'],\n [Types.INT(), Types.INT()])\n ))\n\n# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n# extract and display the results\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = result[field_names.index(model.get_input_cols()[i])]\n output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n", "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/examples/ml/feature/indextostringmodel_example.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 0, "content": "################################################################################\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n################################################################################\n\n", "meta": {"hash_id": "f07555e7f0a67f3edc14594d9ed305a844c369507858c75d620aa8f81765aa6f"}}, {"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 1, "content": "# Simple program that creates an IndexToStringModel instance and uses it\n# for feature engineering.\n\nfrom pyflink.common import Types\nfrom pyflink.datastream import StreamExecutionEnvironment\nfrom pyflink.ml.feature.stringindexer import IndexToStringModel\nfrom pyflink.table import StreamTableEnvironment\n\n# create a new StreamExecutionEnvironment\nenv = StreamExecutionEnvironment.get_execution_environment()\n\n# create a StreamTableEnvironment\nt_env = StreamTableEnvironment.create(env)\n\n# generate input data\npredict_table = t_env.from_data_stream(\n env.from_collection([\n (0, 3),\n (1, 2),\n ],\n type_info=Types.ROW_NAMED(\n ['input_col1', 'input_col2'],\n [Types.INT(), Types.INT()])\n ))\n\n", "meta": {"hash_id": "f0704482e80c29296b1c1dbb100ebbc5d3c0d981889415ff47ea549f136d8bdf"}}, {"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 2, "content": "# create an index-to-string model and initialize its parameters and model data\nmodel_data_table = t_env.from_data_stream(\n env.from_collection([\n ([['a', 'b', 'c', 'd'], [-1., 0., 1., 2.]],),\n ],\n type_info=Types.ROW_NAMED(\n ['stringArrays'],\n [Types.OBJECT_ARRAY(Types.OBJECT_ARRAY(Types.STRING()))])\n ))\n\nmodel = IndexToStringModel() \\\n .set_input_cols('input_col1', 'input_col2') \\\n .set_output_cols('output_col1', 'output_col2') \\\n .set_model_data(model_data_table)\n\n# use the index-to-string model for feature engineering\noutput = model.transform(predict_table)[0]\n\n", "meta": {"hash_id": "27ec9aff5777ae5e04149e8f907edd15ac163163ffbe990e2ee7611007c611f1"}}, {"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 3, "content": "# extract and display the results\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = result[field_names.index(model.get_input_cols()[i])]\n output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n", "meta": {"hash_id": "95086dd8a786df6e85e202538db071ae7c00517370b956af1006ff423132a0fe"}}]}], "golden_chunks": [{"doc_uuid": "85b5dc3fe7b963e62c701c7f73ba8e8de3a2b576b31a75d9024c4e529b9b4107", "index": 3, "content": "# extract and display the results\nfield_names = output.get_schema().get_field_names()\ninput_values = [None for _ in model.get_input_cols()]\noutput_values = [None for _ in model.get_input_cols()]\nfor result in t_env.to_data_stream(output).execute_and_collect():\n for i in range(len(model.get_input_cols())):\n input_values[i] = 
result[field_names.index(model.get_input_cols()[i])]\n        output_values[i] = result[field_names.index(model.get_output_cols()[i])]\n    print('Input Values: ' + str(input_values) + '\\tOutput Values: ' + str(output_values))\n", "meta": {"hash_id": "95086dd8a786df6e85e202538db071ae7c00517370b956af1006ff423132a0fe"}}], "meta": {"doctype": "codebase", "relative_path": "/flink-ml-python/pyflink/examples/ml/feature/indextostringmodel_example.py", "repo_name": "apache/flink-ml", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "How do you create a new instance of the ColorEndPatternConverter?", "answer": "An instance of ColorEndPatternConverter is obtained by calling the static newInstance() method, which returns a shared pointer to a singleton instance:\n\nstatic WideLife<PatternConverterPtr> instance = std::make_shared<ColorEndPatternConverter>();\nreturn instance;", "golden_doc_uuids": ["d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0"], "golden_chunk_uuids": [["d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", 2]], "golden_documents": [{"uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "1e954389b7f30912fba3ae6e67ba5608842c2ecbd39929c7178fe7cc9706587e"}}, {"doc_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "index": 1, "content": "#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\nusing namespace LOG4CXX_NS::pattern;\nusing namespace LOG4CXX_NS::spi;\nusing namespace LOG4CXX_NS::helpers;\n\nIMPLEMENT_LOG4CXX_OBJECT(ColorEndPatternConverter)\n\nColorEndPatternConverter::ColorEndPatternConverter() :\n\tLoggingEventPatternConverter(LOG4CXX_STR(\"Color End\"),\n\t\tLOG4CXX_STR(\"colorEnd\"))\n{\n}\n\n", "meta": {"hash_id": "1b95fbc98b28d653ea3477022fa05707ad565c4e66e4b57075ecf5605e0827d6"}}, {"doc_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "index": 2, "content": "PatternConverterPtr ColorEndPatternConverter::newInstance(\n\tconst std::vector& /* options */)\n{\n\tstatic WideLife instance = std::make_shared();\n\treturn instance;\n}\n\nvoid ColorEndPatternConverter::format(\n\tconst LoggingEventPtr& event,\n\tLogString& toAppendTo,\n\tPool& p) const\n{\n\n\t// Reset all colors on the output(code 0)\n\t// Code 39 would be to reset colors only\n\ttoAppendTo.append(LOG4CXX_STR(\"\\x1B[0m\"));\n}\n", "meta": {"hash_id": "7ce3fc8e19ddcb547bd4c777412284cd67dec2e39578554b7030416d9be9e3aa"}}]}], "golden_chunks": [{"doc_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "index": 2, "content": "PatternConverterPtr ColorEndPatternConverter::newInstance(\n\tconst std::vector& /* options */)\n{\n\tstatic WideLife instance = std::make_shared();\n\treturn instance;\n}\n\nvoid ColorEndPatternConverter::format(\n\tconst LoggingEventPtr& event,\n\tLogString& toAppendTo,\n\tPool& p) const\n{\n\n\t// Reset all colors on the output(code 0)\n\t// Code 39 would be to reset colors only\n\ttoAppendTo.append(LOG4CXX_STR(\"\\x1B[0m\"));\n}\n", "meta": {"hash_id": "7ce3fc8e19ddcb547bd4c777412284cd67dec2e39578554b7030416d9be9e3aa"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/colorendpatternconverter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What parameters does the ColorEndPatternConverter constructor take?", "answer": "The ColorEndPatternConverter constructor takes no parameters. It calls the base LoggingEventPatternConverter constructor passing the literal strings \"Color End\" and \"colorEnd\":\n\nColorEndPatternConverter::ColorEndPatternConverter() :\n LoggingEventPatternConverter(LOG4CXX_STR(\"Color End\"),\n LOG4CXX_STR(\"colorEnd\"))\n{\n}", "golden_doc_uuids": ["d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0"], "golden_chunk_uuids": [["d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", 0]], "golden_documents": [{"uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\nusing namespace LOG4CXX_NS::pattern;\nusing namespace LOG4CXX_NS::spi;\nusing namespace LOG4CXX_NS::helpers;\n\nIMPLEMENT_LOG4CXX_OBJECT(ColorEndPatternConverter)\n\nColorEndPatternConverter::ColorEndPatternConverter() :\n\tLoggingEventPatternConverter(LOG4CXX_STR(\"Color End\"),\n\t\tLOG4CXX_STR(\"colorEnd\"))\n{\n}\n\nPatternConverterPtr ColorEndPatternConverter::newInstance(\n\tconst std::vector& /* options */)\n{\n\tstatic WideLife instance = std::make_shared();\n\treturn instance;\n}\n\nvoid ColorEndPatternConverter::format(\n\tconst LoggingEventPtr& event,\n\tLogString& toAppendTo,\n\tPool& p) const\n{\n\n\t// Reset all colors on the output(code 0)\n\t// Code 39 would be to reset colors only\n\ttoAppendTo.append(LOG4CXX_STR(\"\\x1B[0m\"));\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/colorendpatternconverter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "1e954389b7f30912fba3ae6e67ba5608842c2ecbd39929c7178fe7cc9706587e"}}, {"doc_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "index": 1, "content": "#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\nusing namespace LOG4CXX_NS::pattern;\nusing namespace LOG4CXX_NS::spi;\nusing namespace LOG4CXX_NS::helpers;\n\nIMPLEMENT_LOG4CXX_OBJECT(ColorEndPatternConverter)\n\nColorEndPatternConverter::ColorEndPatternConverter() :\n\tLoggingEventPatternConverter(LOG4CXX_STR(\"Color End\"),\n\t\tLOG4CXX_STR(\"colorEnd\"))\n{\n}\n\n", "meta": {"hash_id": "1b95fbc98b28d653ea3477022fa05707ad565c4e66e4b57075ecf5605e0827d6"}}, {"doc_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "index": 2, "content": "PatternConverterPtr ColorEndPatternConverter::newInstance(\n\tconst std::vector& /* options */)\n{\n\tstatic WideLife instance = std::make_shared();\n\treturn instance;\n}\n\nvoid ColorEndPatternConverter::format(\n\tconst LoggingEventPtr& event,\n\tLogString& toAppendTo,\n\tPool& p) const\n{\n\n\t// Reset all colors on the output(code 0)\n\t// Code 39 would be to reset colors only\n\ttoAppendTo.append(LOG4CXX_STR(\"\\x1B[0m\"));\n}\n", "meta": {"hash_id": "7ce3fc8e19ddcb547bd4c777412284cd67dec2e39578554b7030416d9be9e3aa"}}]}], "golden_chunks": [{"doc_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "1e954389b7f30912fba3ae6e67ba5608842c2ecbd39929c7178fe7cc9706587e"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/colorendpatternconverter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What namespaces are used in this file?", "answer": "This file uses the following namespaces:\nusing namespace LOG4CXX_NS; \nusing namespace LOG4CXX_NS::pattern;\nusing namespace LOG4CXX_NS::spi;\nusing namespace LOG4CXX_NS::helpers;", "golden_doc_uuids": ["d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0"], "golden_chunk_uuids": [["d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", 1]], "golden_documents": [{"uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\nusing namespace LOG4CXX_NS::pattern;\nusing namespace LOG4CXX_NS::spi;\nusing namespace LOG4CXX_NS::helpers;\n\nIMPLEMENT_LOG4CXX_OBJECT(ColorEndPatternConverter)\n\nColorEndPatternConverter::ColorEndPatternConverter() :\n\tLoggingEventPatternConverter(LOG4CXX_STR(\"Color End\"),\n\t\tLOG4CXX_STR(\"colorEnd\"))\n{\n}\n\nPatternConverterPtr ColorEndPatternConverter::newInstance(\n\tconst std::vector& /* options */)\n{\n\tstatic WideLife instance = std::make_shared();\n\treturn instance;\n}\n\nvoid ColorEndPatternConverter::format(\n\tconst LoggingEventPtr& event,\n\tLogString& toAppendTo,\n\tPool& p) const\n{\n\n\t// Reset all colors on the output(code 0)\n\t// Code 39 would be to reset colors only\n\ttoAppendTo.append(LOG4CXX_STR(\"\\x1B[0m\"));\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/colorendpatternconverter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "1e954389b7f30912fba3ae6e67ba5608842c2ecbd39929c7178fe7cc9706587e"}}, {"doc_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "index": 1, "content": "#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\nusing namespace LOG4CXX_NS::pattern;\nusing namespace LOG4CXX_NS::spi;\nusing namespace LOG4CXX_NS::helpers;\n\nIMPLEMENT_LOG4CXX_OBJECT(ColorEndPatternConverter)\n\nColorEndPatternConverter::ColorEndPatternConverter() :\n\tLoggingEventPatternConverter(LOG4CXX_STR(\"Color End\"),\n\t\tLOG4CXX_STR(\"colorEnd\"))\n{\n}\n\n", "meta": {"hash_id": "1b95fbc98b28d653ea3477022fa05707ad565c4e66e4b57075ecf5605e0827d6"}}, {"doc_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "index": 2, "content": "PatternConverterPtr ColorEndPatternConverter::newInstance(\n\tconst std::vector& /* options */)\n{\n\tstatic WideLife instance = std::make_shared();\n\treturn instance;\n}\n\nvoid ColorEndPatternConverter::format(\n\tconst LoggingEventPtr& event,\n\tLogString& toAppendTo,\n\tPool& p) const\n{\n\n\t// Reset all colors on the output(code 0)\n\t// Code 39 would be to reset colors only\n\ttoAppendTo.append(LOG4CXX_STR(\"\\x1B[0m\"));\n}\n", "meta": {"hash_id": "7ce3fc8e19ddcb547bd4c777412284cd67dec2e39578554b7030416d9be9e3aa"}}]}], "golden_chunks": [{"doc_uuid": "d55e0c184149c0c3a0f23bc11ffd11a958dfb9d8a71ba5ac573cba7d1ab37da0", "index": 1, "content": "#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\nusing namespace LOG4CXX_NS::pattern;\nusing namespace LOG4CXX_NS::spi;\nusing namespace LOG4CXX_NS::helpers;\n\nIMPLEMENT_LOG4CXX_OBJECT(ColorEndPatternConverter)\n\nColorEndPatternConverter::ColorEndPatternConverter() :\n\tLoggingEventPatternConverter(LOG4CXX_STR(\"Color End\"),\n\t\tLOG4CXX_STR(\"colorEnd\"))\n{\n}\n\n", "meta": {"hash_id": "1b95fbc98b28d653ea3477022fa05707ad565c4e66e4b57075ecf5605e0827d6"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/colorendpatternconverter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What does the XMLFilenameFilter constructor do?", "answer": "The XMLFilenameFilter constructor takes two string parameters: actual and expected. It constructs a pattern string by concatenating \" file=\\\\(.\\\\).*\" with the expected parameter. It then constructs a replacement string by concatenating \" file=\\\\\\\\1\" with the expected parameter. 
The actual parameter is not used in the constructor body.\nFor example, if expected is \"foo.xml\", the resulting pattern will be \" file=\\\\(.\\\\).*foo.xml\" and the replacement will be \" file=\\\\1foo.xml\".", "golden_doc_uuids": ["5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4"], "golden_chunk_uuids": [["5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", 1]], "golden_documents": [{"uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/util/xmlfilenamefilter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "8510b796d5e975dcb1a043e406be5a080571b2c2b96b0937dfe0949f6f4a9dd6"}}, {"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 1, "content": "#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"hash_id": "b069915566954d33d9a42666c5f3a9e725cb558386c3207760eca3dc2a6f03c9"}}]}], "golden_chunks": [{"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 1, "content": "#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"hash_id": "b069915566954d33d9a42666c5f3a9e725cb558386c3207760eca3dc2a6f03c9"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/util/xmlfilenamefilter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the pattern and replacement strings constructed in the XMLFilenameFilter constructor?", "answer": "The pattern and replacement strings are likely intended to be used for matching and replacing portions of XML filenames. The pattern looks for the substring \" file=\" followed by any single character captured in a group (the \\\\(.\\\\) part), followed by any characters (.*), and ending with the expected filename. \nThe replacement string keeps the \" file=\" and captured single character (referred to by \\\\1), and then appends the expected filename.\nThis suggests the XMLFilenameFilter may be used to normalize or standardize XML filenames in some way, by replacing variable parts of the filename with a fixed expected value.", "golden_doc_uuids": ["5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4"], "golden_chunk_uuids": [["5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", 1]], "golden_documents": [{"uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/util/xmlfilenamefilter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "8510b796d5e975dcb1a043e406be5a080571b2c2b96b0937dfe0949f6f4a9dd6"}}, {"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 1, "content": "#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"hash_id": "b069915566954d33d9a42666c5f3a9e725cb558386c3207760eca3dc2a6f03c9"}}]}], "golden_chunks": [{"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 1, "content": "#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"hash_id": "b069915566954d33d9a42666c5f3a9e725cb558386c3207760eca3dc2a6f03c9"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/util/xmlfilenamefilter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How would the 
XMLFilenameFilter be used?", "answer": "The XMLFilenameFilter constructor takes an expected filename parameter, so it seems an instance would be constructed by passing in the desired/expected XML filename to match against.\nThe pattern and replacement strings are constructed but not actually used within the provided constructor code. There is a commented out line:\n// patterns.push_back( PatternReplacement(pattern, replacement) );\nThis suggests that in the full implementation, the XMLFilenameFilter class would maintain a list of PatternReplacement objects, and the constructor would add the pattern and replacement it generates to that list.\nThe XMLFilenameFilter instance could then be used to match against XML filenames and replace/normalize them using the expected filename it was constructed with.", "golden_doc_uuids": ["5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4"], "golden_chunk_uuids": [["5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", 1]], "golden_documents": [{"uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/util/xmlfilenamefilter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "8510b796d5e975dcb1a043e406be5a080571b2c2b96b0937dfe0949f6f4a9dd6"}}, {"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 1, "content": "#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"hash_id": "b069915566954d33d9a42666c5f3a9e725cb558386c3207760eca3dc2a6f03c9"}}]}], "golden_chunks": [{"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 1, "content": "#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"hash_id": "b069915566954d33d9a42666c5f3a9e725cb558386c3207760eca3dc2a6f03c9"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/util/xmlfilenamefilter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What C++ standard library classes are used in this code?", "answer": "The code uses std::string from the C++ standard library to store the pattern and replacement strings.", "golden_doc_uuids": ["5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4"], "golden_chunk_uuids": [["5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", 1]], "golden_documents": [{"uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/util/xmlfilenamefilter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "8510b796d5e975dcb1a043e406be5a080571b2c2b96b0937dfe0949f6f4a9dd6"}}, {"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 1, "content": "#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"hash_id": "b069915566954d33d9a42666c5f3a9e725cb558386c3207760eca3dc2a6f03c9"}}]}], "golden_chunks": [{"doc_uuid": "5b1559cf19dd6f22a42968d4489aff3f9df8ae90ba9b002e9d89f9e0f3c981f4", "index": 1, "content": "#include \"xmlfilenamefilter.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nXMLFilenameFilter::XMLFilenameFilter(const std::string& /*actual*/, const std::string& expected)\n{\n\tstd::string pattern(\" file=\\\\(.\\\\).*\");\n\tpattern += expected;\n\n\tstd::string replacement(\" file=\\\\\\\\1\");\n\treplacement += expected;\n\t// patterns.push_back( PatternReplacement(pattern, replacement) );\n}\n", "meta": {"hash_id": "b069915566954d33d9a42666c5f3a9e725cb558386c3207760eca3dc2a6f03c9"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/util/xmlfilenamefilter.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How does the testTrigger unit 
test work?", "answer": "The testTrigger unit test does the following:\n1. Configures log4cxx using an XML file \"input/xml/smtpAppender1.xml\". This presumably specifies an SMTPAppender with a MockTriggeringEventEvaluator.\n2. Gets a reference to the configured SMTPAppender with the name \"A1\" using Logger::getRootLogger()->getAppender(LOG4CXX_STR(\"A1\")).\n3. Asserts that the appender exists.\n4. Gets the TriggeringEventEvaluator from the appender using appender->getEvaluator().\n5. Asserts that the evaluator exists and is an instance of MockTriggeringEventEvaluator.\nThis test verifies that the triggeringPolicy element in the XML configuration properly sets the specified evaluator implementation on the SMTPAppender.", "golden_doc_uuids": ["968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4"], "golden_chunk_uuids": [["968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", 3], ["968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", 1], ["968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", 0]], "golden_documents": [{"uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#define LOG4CXX_TEST 1\n#include \n\n#include \n#include \"../appenderskeletontestcase.h\"\n#include \n#include \n#include \n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace net\n{\n\nclass MockTriggeringEventEvaluator :\n\tpublic virtual spi::TriggeringEventEvaluator\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(MockTriggeringEventEvaluator)\n\t\tLOG4CXX_CAST_ENTRY(spi::TriggeringEventEvaluator)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tMockTriggeringEventEvaluator()\n\t\t{\n\t\t}\n\n\t\tbool isTriggeringEvent(const spi::LoggingEventPtr& event) override\n\t\t{\n\t\t\treturn true;\n\t\t}\n\tprivate:\n\t\tMockTriggeringEventEvaluator(const MockTriggeringEventEvaluator&);\n\t\tMockTriggeringEventEvaluator& operator=(const MockTriggeringEventEvaluator&);\n};\n}\n}\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\nusing namespace log4cxx::net;\n\nIMPLEMENT_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\n\n/**\n Unit tests of log4cxx::SocketAppender\n */\nclass SMTPAppenderTestCase : public AppenderSkeletonTestCase\n{\n\t\tLOGUNIT_TEST_SUITE(SMTPAppenderTestCase);\n\t\t//\n\t\t// tests inherited from AppenderSkeletonTestCase\n\t\t//\n\t\tLOGUNIT_TEST(testDefaultThreshold);\n\t\tLOGUNIT_TEST(testSetOptionThreshold);\n\t\tLOGUNIT_TEST(testTrigger);\n\t\tLOGUNIT_TEST(testInvalid);\n//#define LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n#ifdef LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n\t\t// This test requires the 
following environment variables:\n\t\t// LOG4CXX_TEST_EMAIL_RECIPIENT - where the email is sent\n\t\t// LOG4CXX_TEST_SMTP_HOST_NAME - the email server\n\t\tLOGUNIT_TEST(testValid);\n#endif\n\t\tLOGUNIT_TEST_SUITE_END();\n\n\n\tpublic:\n\n\t\tAppenderSkeleton* createAppenderSkeleton() const\n\t\t{\n\t\t\treturn new log4cxx::net::SMTPAppender();\n\t\t}\n\n\t\tvoid setUp()\n\t\t{\n\t\t}\n\n\t\tvoid tearDown()\n\t\t{\n\t\t\tLogManager::resetConfiguration();\n\t\t}\n\n\t\t/**\n\t\t * Tests that triggeringPolicy element will set evaluator.\n\t\t */\n\t\tvoid testTrigger()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppender1.xml\");\n\t\t\tauto appender = log4cxx::cast(Logger::getRootLogger()->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto evaluator = appender->getEvaluator();\n\t\t\tLOGUNIT_ASSERT(evaluator);\n\t\t\tLOGUNIT_ASSERT_EQUAL(true, evaluator->instanceof(MockTriggeringEventEvaluator::getStaticClass()));\n\t\t}\n\n\t\tvoid testInvalid()\n\t\t{\n\t\t\tauto appender = std::make_shared();\n\t\t\tappender->setSMTPHost(LOG4CXX_STR(\"smtp.invalid\"));\n\t\t\tappender->setTo(LOG4CXX_STR(\"you@example.invalid\"));\n\t\t\tappender->setFrom(LOG4CXX_STR(\"me@example.invalid\"));\n\t\t\tappender->setLayout(std::make_shared());\n\t\t\tPool p;\n\t\t\tappender->activateOptions(p);\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\troot->addAppender(appender);\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\");\n\t\t\tLOG4CXX_ERROR(root, \"Sending Message\"); // The DefaultEvaluator should trigger e-mail generation\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(eh->errorReported());\n\t\t}\n\n\n\t\tvoid testValid()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppenderValid.xml\");\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\\n\\nThis paragraph should be preceeded by a blank line.\");\n\n\t\t\tauto appender = log4cxx::cast(root->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(!eh->errorReported());\n\t\t}\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(SMTPAppenderTestCase);\n\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/net/smtpappendertestcase.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "ead03b3d5b76a375b3ebe7f6f08656e560f554ea9e3afd6f048e631e27aa0cf8"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 1, "content": "#define LOG4CXX_TEST 1\n#include \n\n#include \n#include \"../appenderskeletontestcase.h\"\n#include \n#include \n#include \n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace net\n{\n\nclass MockTriggeringEventEvaluator :\n\tpublic virtual spi::TriggeringEventEvaluator\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(MockTriggeringEventEvaluator)\n\t\tLOG4CXX_CAST_ENTRY(spi::TriggeringEventEvaluator)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n", "meta": {"hash_id": "ed097a3759b65e99bdd9bfb12655a490a9c08760c33a7494da9192f68764eded"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 2, "content": "\t\tMockTriggeringEventEvaluator()\n\t\t{\n\t\t}\n\n\t\tbool isTriggeringEvent(const spi::LoggingEventPtr& event) override\n\t\t{\n\t\t\treturn true;\n\t\t}\n\tprivate:\n\t\tMockTriggeringEventEvaluator(const MockTriggeringEventEvaluator&);\n\t\tMockTriggeringEventEvaluator& operator=(const MockTriggeringEventEvaluator&);\n};\n}\n}\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\nusing namespace log4cxx::net;\n\nIMPLEMENT_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\n", "meta": {"hash_id": "c93058ca6e2c021c72ccae9ba4f8c6b76e3e644a1adb16b61505013f331203d6"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 3, "content": "\n/**\n Unit tests of log4cxx::SocketAppender\n */\nclass SMTPAppenderTestCase : public AppenderSkeletonTestCase\n{\n\t\tLOGUNIT_TEST_SUITE(SMTPAppenderTestCase);\n\t\t//\n\t\t// tests inherited from AppenderSkeletonTestCase\n\t\t//\n\t\tLOGUNIT_TEST(testDefaultThreshold);\n\t\tLOGUNIT_TEST(testSetOptionThreshold);\n\t\tLOGUNIT_TEST(testTrigger);\n\t\tLOGUNIT_TEST(testInvalid);\n//#define LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n#ifdef LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n\t\t// This test requires the following environment variables:\n\t\t// LOG4CXX_TEST_EMAIL_RECIPIENT - where the email is sent\n\t\t// LOG4CXX_TEST_SMTP_HOST_NAME - the email server\n\t\tLOGUNIT_TEST(testValid);\n#endif\n\t\tLOGUNIT_TEST_SUITE_END();\n\n", "meta": {"hash_id": "0278226da7236fe5948e0bb3fb35792b780bd73e4875a20e05714218126a56fd"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 4, "content": "\n\tpublic:\n\n\t\tAppenderSkeleton* createAppenderSkeleton() const\n\t\t{\n\t\t\treturn new log4cxx::net::SMTPAppender();\n\t\t}\n\n\t\tvoid setUp()\n\t\t{\n\t\t}\n\n\t\tvoid tearDown()\n\t\t{\n\t\t\tLogManager::resetConfiguration();\n\t\t}\n\n\t\t/**\n\t\t * Tests that triggeringPolicy element will set evaluator.\n\t\t */\n\t\tvoid testTrigger()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppender1.xml\");\n\t\t\tauto appender = 
log4cxx::cast(Logger::getRootLogger()->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto evaluator = appender->getEvaluator();\n\t\t\tLOGUNIT_ASSERT(evaluator);\n\t\t\tLOGUNIT_ASSERT_EQUAL(true, evaluator->instanceof(MockTriggeringEventEvaluator::getStaticClass()));\n\t\t}\n\n", "meta": {"hash_id": "3360d4d9396805e67223a64be195865fe1cc13b79113d710306bfeb8a7554a46"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 5, "content": "\t\tvoid testInvalid()\n\t\t{\n\t\t\tauto appender = std::make_shared();\n\t\t\tappender->setSMTPHost(LOG4CXX_STR(\"smtp.invalid\"));\n\t\t\tappender->setTo(LOG4CXX_STR(\"you@example.invalid\"));\n\t\t\tappender->setFrom(LOG4CXX_STR(\"me@example.invalid\"));\n\t\t\tappender->setLayout(std::make_shared());\n\t\t\tPool p;\n\t\t\tappender->activateOptions(p);\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\troot->addAppender(appender);\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\");\n\t\t\tLOG4CXX_ERROR(root, \"Sending Message\"); // The DefaultEvaluator should trigger e-mail generation\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(eh->errorReported());\n\t\t}\n\n", "meta": {"hash_id": "ebc0902f7b5de3a7ca3214afef3045852976a89d3ebdbbd8c0c47d84c508d845"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 6, "content": "\n\t\tvoid testValid()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppenderValid.xml\");\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\\n\\nThis paragraph should be preceeded by a blank line.\");\n\n\t\t\tauto appender = log4cxx::cast(root->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(!eh->errorReported());\n\t\t}\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(SMTPAppenderTestCase);\n\n", "meta": {"hash_id": "d0c7045f8cc884a57e5671ab4b31e0548a3e70b6f35d56c5c40681f7dc44e0a1"}}]}], "golden_chunks": [{"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 3, "content": "\n/**\n Unit tests of log4cxx::SocketAppender\n */\nclass SMTPAppenderTestCase : public AppenderSkeletonTestCase\n{\n\t\tLOGUNIT_TEST_SUITE(SMTPAppenderTestCase);\n\t\t//\n\t\t// tests inherited from AppenderSkeletonTestCase\n\t\t//\n\t\tLOGUNIT_TEST(testDefaultThreshold);\n\t\tLOGUNIT_TEST(testSetOptionThreshold);\n\t\tLOGUNIT_TEST(testTrigger);\n\t\tLOGUNIT_TEST(testInvalid);\n//#define LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n#ifdef LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n\t\t// This test requires the following environment variables:\n\t\t// LOG4CXX_TEST_EMAIL_RECIPIENT - where the email is sent\n\t\t// LOG4CXX_TEST_SMTP_HOST_NAME - the email server\n\t\tLOGUNIT_TEST(testValid);\n#endif\n\t\tLOGUNIT_TEST_SUITE_END();\n\n", "meta": {"hash_id": "0278226da7236fe5948e0bb3fb35792b780bd73e4875a20e05714218126a56fd"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 1, "content": "#define LOG4CXX_TEST 1\n#include \n\n#include \n#include \"../appenderskeletontestcase.h\"\n#include \n#include \n#include \n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace net\n{\n\nclass MockTriggeringEventEvaluator :\n\tpublic virtual 
spi::TriggeringEventEvaluator\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(MockTriggeringEventEvaluator)\n\t\tLOG4CXX_CAST_ENTRY(spi::TriggeringEventEvaluator)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n", "meta": {"hash_id": "ed097a3759b65e99bdd9bfb12655a490a9c08760c33a7494da9192f68764eded"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "ead03b3d5b76a375b3ebe7f6f08656e560f554ea9e3afd6f048e631e27aa0cf8"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/net/smtpappendertestcase.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "How does the testValid unit test work?", "answer": "The testValid unit test checks the behavior of a properly configured SMTPAppender:\n1. Configures log4cxx using an XML file \"input/xml/smtpAppenderValid.xml\". This presumably specifies an SMTPAppender with valid SMTP settings.\n2. Gets a reference to the root logger.\n3. Logs an info message with some test content. This message alone does not appear to be intended to trigger the appender.\n4. Gets a reference to the configured SMTPAppender with the name \"A1\".\n5. Asserts that the appender exists.\n6. Gets the error handler from the appender, expecting it to be an instance of OnlyOnceErrorHandler.\n7. Asserts that the error handler exists and that it has not reported any errors, since the appender should have been able to successfully send the email.\nThis test verifies that a properly configured SMTPAppender can send email messages without errors. Note that this test will only run if the LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES preprocessor symbol is defined, since it requires real SMTP server details to be provided via environment variables.", "golden_doc_uuids": ["968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4"], "golden_chunk_uuids": [["968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", 6]], "golden_documents": [{"uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#define LOG4CXX_TEST 1\n#include \n\n#include \n#include \"../appenderskeletontestcase.h\"\n#include \n#include \n#include \n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace net\n{\n\nclass MockTriggeringEventEvaluator :\n\tpublic virtual spi::TriggeringEventEvaluator\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(MockTriggeringEventEvaluator)\n\t\tLOG4CXX_CAST_ENTRY(spi::TriggeringEventEvaluator)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tMockTriggeringEventEvaluator()\n\t\t{\n\t\t}\n\n\t\tbool isTriggeringEvent(const spi::LoggingEventPtr& event) override\n\t\t{\n\t\t\treturn true;\n\t\t}\n\tprivate:\n\t\tMockTriggeringEventEvaluator(const MockTriggeringEventEvaluator&);\n\t\tMockTriggeringEventEvaluator& operator=(const MockTriggeringEventEvaluator&);\n};\n}\n}\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\nusing namespace log4cxx::net;\n\nIMPLEMENT_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\n\n/**\n Unit tests of log4cxx::SocketAppender\n */\nclass SMTPAppenderTestCase : public AppenderSkeletonTestCase\n{\n\t\tLOGUNIT_TEST_SUITE(SMTPAppenderTestCase);\n\t\t//\n\t\t// tests inherited from AppenderSkeletonTestCase\n\t\t//\n\t\tLOGUNIT_TEST(testDefaultThreshold);\n\t\tLOGUNIT_TEST(testSetOptionThreshold);\n\t\tLOGUNIT_TEST(testTrigger);\n\t\tLOGUNIT_TEST(testInvalid);\n//#define LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n#ifdef LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n\t\t// This test requires the following environment variables:\n\t\t// LOG4CXX_TEST_EMAIL_RECIPIENT - where the email is sent\n\t\t// LOG4CXX_TEST_SMTP_HOST_NAME - the email server\n\t\tLOGUNIT_TEST(testValid);\n#endif\n\t\tLOGUNIT_TEST_SUITE_END();\n\n\n\tpublic:\n\n\t\tAppenderSkeleton* createAppenderSkeleton() const\n\t\t{\n\t\t\treturn new log4cxx::net::SMTPAppender();\n\t\t}\n\n\t\tvoid setUp()\n\t\t{\n\t\t}\n\n\t\tvoid tearDown()\n\t\t{\n\t\t\tLogManager::resetConfiguration();\n\t\t}\n\n\t\t/**\n\t\t * Tests that triggeringPolicy element will set evaluator.\n\t\t */\n\t\tvoid testTrigger()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppender1.xml\");\n\t\t\tauto appender = log4cxx::cast(Logger::getRootLogger()->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto evaluator = appender->getEvaluator();\n\t\t\tLOGUNIT_ASSERT(evaluator);\n\t\t\tLOGUNIT_ASSERT_EQUAL(true, evaluator->instanceof(MockTriggeringEventEvaluator::getStaticClass()));\n\t\t}\n\n\t\tvoid testInvalid()\n\t\t{\n\t\t\tauto appender = std::make_shared();\n\t\t\tappender->setSMTPHost(LOG4CXX_STR(\"smtp.invalid\"));\n\t\t\tappender->setTo(LOG4CXX_STR(\"you@example.invalid\"));\n\t\t\tappender->setFrom(LOG4CXX_STR(\"me@example.invalid\"));\n\t\t\tappender->setLayout(std::make_shared());\n\t\t\tPool p;\n\t\t\tappender->activateOptions(p);\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\troot->addAppender(appender);\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\");\n\t\t\tLOG4CXX_ERROR(root, \"Sending Message\"); // The DefaultEvaluator should trigger e-mail 
generation\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(eh->errorReported());\n\t\t}\n\n\n\t\tvoid testValid()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppenderValid.xml\");\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\\n\\nThis paragraph should be preceeded by a blank line.\");\n\n\t\t\tauto appender = log4cxx::cast(root->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(!eh->errorReported());\n\t\t}\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(SMTPAppenderTestCase);\n\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/net/smtpappendertestcase.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "ead03b3d5b76a375b3ebe7f6f08656e560f554ea9e3afd6f048e631e27aa0cf8"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 1, "content": "#define LOG4CXX_TEST 1\n#include \n\n#include \n#include \"../appenderskeletontestcase.h\"\n#include \n#include \n#include \n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace net\n{\n\nclass MockTriggeringEventEvaluator :\n\tpublic virtual spi::TriggeringEventEvaluator\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(MockTriggeringEventEvaluator)\n\t\tLOG4CXX_CAST_ENTRY(spi::TriggeringEventEvaluator)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n", "meta": {"hash_id": "ed097a3759b65e99bdd9bfb12655a490a9c08760c33a7494da9192f68764eded"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 2, "content": "\t\tMockTriggeringEventEvaluator()\n\t\t{\n\t\t}\n\n\t\tbool isTriggeringEvent(const spi::LoggingEventPtr& event) override\n\t\t{\n\t\t\treturn true;\n\t\t}\n\tprivate:\n\t\tMockTriggeringEventEvaluator(const MockTriggeringEventEvaluator&);\n\t\tMockTriggeringEventEvaluator& operator=(const MockTriggeringEventEvaluator&);\n};\n}\n}\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\nusing namespace log4cxx::net;\n\nIMPLEMENT_LOG4CXX_OBJECT(MockTriggeringEventEvaluator)\n\n", "meta": {"hash_id": "c93058ca6e2c021c72ccae9ba4f8c6b76e3e644a1adb16b61505013f331203d6"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 3, "content": "\n/**\n Unit tests of log4cxx::SocketAppender\n */\nclass SMTPAppenderTestCase : public 
AppenderSkeletonTestCase\n{\n\t\tLOGUNIT_TEST_SUITE(SMTPAppenderTestCase);\n\t\t//\n\t\t// tests inherited from AppenderSkeletonTestCase\n\t\t//\n\t\tLOGUNIT_TEST(testDefaultThreshold);\n\t\tLOGUNIT_TEST(testSetOptionThreshold);\n\t\tLOGUNIT_TEST(testTrigger);\n\t\tLOGUNIT_TEST(testInvalid);\n//#define LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n#ifdef LOG4CXX_TEST_EMAIL_AND_SMTP_HOST_ARE_IN_ENVIRONMENT_VARIABLES\n\t\t// This test requires the following environment variables:\n\t\t// LOG4CXX_TEST_EMAIL_RECIPIENT - where the email is sent\n\t\t// LOG4CXX_TEST_SMTP_HOST_NAME - the email server\n\t\tLOGUNIT_TEST(testValid);\n#endif\n\t\tLOGUNIT_TEST_SUITE_END();\n\n", "meta": {"hash_id": "0278226da7236fe5948e0bb3fb35792b780bd73e4875a20e05714218126a56fd"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 4, "content": "\n\tpublic:\n\n\t\tAppenderSkeleton* createAppenderSkeleton() const\n\t\t{\n\t\t\treturn new log4cxx::net::SMTPAppender();\n\t\t}\n\n\t\tvoid setUp()\n\t\t{\n\t\t}\n\n\t\tvoid tearDown()\n\t\t{\n\t\t\tLogManager::resetConfiguration();\n\t\t}\n\n\t\t/**\n\t\t * Tests that triggeringPolicy element will set evaluator.\n\t\t */\n\t\tvoid testTrigger()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppender1.xml\");\n\t\t\tauto appender = log4cxx::cast(Logger::getRootLogger()->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto evaluator = appender->getEvaluator();\n\t\t\tLOGUNIT_ASSERT(evaluator);\n\t\t\tLOGUNIT_ASSERT_EQUAL(true, evaluator->instanceof(MockTriggeringEventEvaluator::getStaticClass()));\n\t\t}\n\n", "meta": {"hash_id": "3360d4d9396805e67223a64be195865fe1cc13b79113d710306bfeb8a7554a46"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 5, "content": "\t\tvoid testInvalid()\n\t\t{\n\t\t\tauto appender = std::make_shared();\n\t\t\tappender->setSMTPHost(LOG4CXX_STR(\"smtp.invalid\"));\n\t\t\tappender->setTo(LOG4CXX_STR(\"you@example.invalid\"));\n\t\t\tappender->setFrom(LOG4CXX_STR(\"me@example.invalid\"));\n\t\t\tappender->setLayout(std::make_shared());\n\t\t\tPool p;\n\t\t\tappender->activateOptions(p);\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\troot->addAppender(appender);\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\");\n\t\t\tLOG4CXX_ERROR(root, \"Sending Message\"); // The DefaultEvaluator should trigger e-mail generation\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(eh->errorReported());\n\t\t}\n\n", "meta": {"hash_id": "ebc0902f7b5de3a7ca3214afef3045852976a89d3ebdbbd8c0c47d84c508d845"}}, {"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", "index": 6, "content": "\n\t\tvoid testValid()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppenderValid.xml\");\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\\n\\nThis paragraph should be preceeded by a blank line.\");\n\n\t\t\tauto appender = log4cxx::cast(root->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(!eh->errorReported());\n\t\t}\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(SMTPAppenderTestCase);\n\n", "meta": {"hash_id": "d0c7045f8cc884a57e5671ab4b31e0548a3e70b6f35d56c5c40681f7dc44e0a1"}}]}], "golden_chunks": [{"doc_uuid": "968bc75b566183e5c5072fff16b22bd071cbcc34e15d48d945b5b870d0f3bfe4", 
"index": 6, "content": "\n\t\tvoid testValid()\n\t\t{\n\t\t\txml::DOMConfigurator::configure(\"input/xml/smtpAppenderValid.xml\");\n\t\t\tauto root = Logger::getRootLogger();\n\t\t\tLOG4CXX_INFO(root, \"Hello, World.\\n\\nThis paragraph should be preceeded by a blank line.\");\n\n\t\t\tauto appender = log4cxx::cast(root->getAppender(LOG4CXX_STR(\"A1\")));\n\t\t\tLOGUNIT_ASSERT(appender);\n\t\t\tauto eh = dynamic_cast(appender->getErrorHandler().get());\n\t\t\tLOGUNIT_ASSERT(eh);\n\t\t\tLOGUNIT_ASSERT(!eh->errorReported());\n\t\t}\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(SMTPAppenderTestCase);\n\n", "meta": {"hash_id": "d0c7045f8cc884a57e5671ab4b31e0548a3e70b6f35d56c5c40681f7dc44e0a1"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/net/smtpappendertestcase.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "How do you configure the logging system with a specific layout using BasicConfigurator?", "answer": "To configure the logging system with a specific layout, you can call the `BasicConfigurator::configure(const LayoutPtr& layoutArg)` method. If `layoutArg` is not provided, it defaults to a PatternLayout with the conversion pattern `\"%r [%t] %p %c %x - %m%n\"`. This method sets the logger repository as configured and adds a ConsoleAppender with the specified layout to the root logger.", "golden_doc_uuids": ["652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863"], "golden_chunk_uuids": [["652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", 1], ["652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", 0]], "golden_documents": [{"uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\nvoid BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/basicconfigurator.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n", "meta": {"hash_id": "d36d1e05eddeb14beaf07d0f9baa63d5fb77042f56507196dc06d63b96f53299"}}, {"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 1, "content": " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\n", "meta": {"hash_id": "c52bc2c15c741b1013490698182abf18cb386d449eba1e88a7f080d9de101128"}}, {"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 2, "content": "void BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"hash_id": "5fc7005106eb4c6c6f9ff85f29af786ea7db6c29c970614436dd3b214dab11e2"}}]}], "golden_chunks": [{"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 1, "content": " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\n", "meta": {"hash_id": "c52bc2c15c741b1013490698182abf18cb386d449eba1e88a7f080d9de101128"}}, {"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n", "meta": {"hash_id": "d36d1e05eddeb14beaf07d0f9baa63d5fb77042f56507196dc06d63b96f53299"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/basicconfigurator.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How do you configure the logging system with a specific appender using BasicConfigurator?", "answer": "To configure the logging system with a specific appender, you can call the `BasicConfigurator::configure(const AppenderPtr& appender)` method. 
This method adds the provided appender to the root logger.", "golden_doc_uuids": ["652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863"], "golden_chunk_uuids": [["652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", 2]], "golden_documents": [{"uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\nvoid BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/basicconfigurator.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n", "meta": {"hash_id": "d36d1e05eddeb14beaf07d0f9baa63d5fb77042f56507196dc06d63b96f53299"}}, {"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 1, "content": " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\n", "meta": {"hash_id": "c52bc2c15c741b1013490698182abf18cb386d449eba1e88a7f080d9de101128"}}, {"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 2, "content": "void BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"hash_id": "5fc7005106eb4c6c6f9ff85f29af786ea7db6c29c970614436dd3b214dab11e2"}}]}], "golden_chunks": [{"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 2, "content": "void BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"hash_id": "5fc7005106eb4c6c6f9ff85f29af786ea7db6c29c970614436dd3b214dab11e2"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/basicconfigurator.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How do you reset the logging configuration to its default state using BasicConfigurator?", "answer": "To reset the logging configuration to its default state, you can call the `BasicConfigurator::resetConfiguration()` method. This method calls `LogManager::resetConfiguration()` internally, which resets the logging configuration to its default state.", "golden_doc_uuids": ["652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863"], "golden_chunk_uuids": [["652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", 2]], "golden_documents": [{"uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\nvoid BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/basicconfigurator.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n", "meta": {"hash_id": "d36d1e05eddeb14beaf07d0f9baa63d5fb77042f56507196dc06d63b96f53299"}}, {"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 1, "content": " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\n", "meta": {"hash_id": "c52bc2c15c741b1013490698182abf18cb386d449eba1e88a7f080d9de101128"}}, {"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 2, "content": "void BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"hash_id": "5fc7005106eb4c6c6f9ff85f29af786ea7db6c29c970614436dd3b214dab11e2"}}]}], "golden_chunks": [{"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 2, "content": "void BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"hash_id": "5fc7005106eb4c6c6f9ff85f29af786ea7db6c29c970614436dd3b214dab11e2"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/basicconfigurator.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What is the default layout used by BasicConfigurator if no layout is provided?", "answer": "If no layout is provided to `BasicConfigurator::configure(const LayoutPtr& layoutArg)`, it defaults to a PatternLayout with the conversion pattern `\"%r [%t] %p %c %x - %m%n\"`. 
This is defined by the static constant `TTCC_CONVERSION_PATTERN` in the code:\n```cpp\nstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n```", "golden_doc_uuids": ["652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863"], "golden_chunk_uuids": [["652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", 2]], "golden_documents": [{"uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\nvoid BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/basicconfigurator.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n", "meta": {"hash_id": "d36d1e05eddeb14beaf07d0f9baa63d5fb77042f56507196dc06d63b96f53299"}}, {"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 1, "content": " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\n", "meta": {"hash_id": "c52bc2c15c741b1013490698182abf18cb386d449eba1e88a7f080d9de101128"}}, {"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 2, "content": "void BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"hash_id": "5fc7005106eb4c6c6f9ff85f29af786ea7db6c29c970614436dd3b214dab11e2"}}]}], "golden_chunks": [{"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 2, "content": "void BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"hash_id": "5fc7005106eb4c6c6f9ff85f29af786ea7db6c29c970614436dd3b214dab11e2"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/basicconfigurator.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the WideLife template class used in the code?", "answer": "The `WideLife` template class is used to store the `TTCC_CONVERSION_PATTERN` constant. It is defined in the `log4cxx::helpers` namespace and is used to manage the lifetime of the wide string conversion pattern. 
The exact purpose and implementation of `WideLife` is not shown in the provided code snippet.", "golden_doc_uuids": ["652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863"], "golden_chunk_uuids": [["652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", 1]], "golden_documents": [{"uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\nvoid BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/basicconfigurator.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n", "meta": {"hash_id": "d36d1e05eddeb14beaf07d0f9baa63d5fb77042f56507196dc06d63b96f53299"}}, {"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 1, "content": " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\n", "meta": {"hash_id": "c52bc2c15c741b1013490698182abf18cb386d449eba1e88a7f080d9de101128"}}, {"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 2, "content": "void BasicConfigurator::configure(const LayoutPtr& layoutArg)\n{\n\tLogManager::getLoggerRepository()->setConfigured(true);\n\tauto layout = layoutArg;\n\tif (!layout)\n\t{\n\t\tstatic const helpers::WideLife TTCC_CONVERSION_PATTERN(LOG4CXX_STR(\"%r [%t] %p %c %x - %m%n\"));\n\t\tlayout = std::make_shared(TTCC_CONVERSION_PATTERN);\n\t}\n\tauto appender = std::make_shared(layout);\n\tLogger::getRootLogger()->addAppender(appender);\n}\n\nvoid BasicConfigurator::configure(const AppenderPtr& appender)\n{\n\tLoggerPtr root = Logger::getRootLogger();\n\troot->addAppender(appender);\n}\n\nvoid BasicConfigurator::resetConfiguration()\n{\n\tLogManager::resetConfiguration();\n}\n", "meta": {"hash_id": "5fc7005106eb4c6c6f9ff85f29af786ea7db6c29c970614436dd3b214dab11e2"}}]}], "golden_chunks": [{"doc_uuid": "652e911eecd9e07046e7e3dd91729064fca4abf1950dc96663a3f734b7673863", "index": 1, "content": " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\n", "meta": {"hash_id": "c52bc2c15c741b1013490698182abf18cb386d449eba1e88a7f080d9de101128"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/basicconfigurator.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How does the hexdump function handle different character types for logging?", "answer": "The hexdump function uses conditional compilation to handle different character types for logging. It checks if LOG4CXX_LOGCHAR_IS_WCHAR is defined, and based on that, it sets the fill_char and space_fill_char variables accordingly:\n\n#if LOG4CXX_LOGCHAR_IS_WCHAR\n\tconst wchar_t fill_char = L'0';\n\tconst wchar_t space_fill_char = L' ';\n#else\n\tconst logchar fill_char = '0';\n\tconst logchar space_fill_char = ' ';\n#endif\n\nThis allows the function to work with both wide characters (wchar_t) and regular characters (logchar) depending on the configuration.", "golden_doc_uuids": ["6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a"], "golden_chunk_uuids": [["6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a", 1]], "golden_documents": [{"uuid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include \n/* Prevent error C2491: 'std::numpunct<_Elem>::id': definition of dllimport static data member not allowed */\n#if defined(_MSC_VER) && (LOG4CXX_UNICHAR_API || LOG4CXX_LOGCHAR_IS_UNICHAR)\n#define __FORCE_INSTANCE\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\ntypedef std::basic_stringstream LogStream;\n\nLogString LOG4CXX_NS::hexdump(const void* bytes, uint32_t len, HexdumpFlags flags){\n\tLogString ret;\n\tconst uint8_t* bytes_u8 = static_cast(bytes);\n\tLogStream sstream;\n#if LOG4CXX_LOGCHAR_IS_WCHAR\n\tconst wchar_t fill_char = L'0';\n\tconst wchar_t space_fill_char = L' ';\n#else\n\tconst logchar fill_char = '0';\n\tconst logchar space_fill_char = ' ';\n#endif\n\n\tif(flags & HexdumpFlags::AddStartingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\tfor(uint32_t offset = 0; offset < len; offset += 16){\n\t\tif(offset != 0){\n\t\t\tsstream << LOG4CXX_EOL;\n\t\t}\n\n\t\t// Print out the offset\n\t\tsstream << std::hex << std::setw(8) << std::setfill(fill_char) << offset << std::resetiosflags(std::ios_base::fmtflags(0));\n\n\t\tsstream << std::setw(0) << LOG4CXX_STR(\" \");\n\n\t\t// Print out the first 8 bytes\n\t\tfor(int byte = 0; byte < 8; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\tif(byte != 8){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tsstream << std::setfill(space_fill_char);\n\t\t\tif(byte != 8){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n\t\tsstream << LOG4CXX_STR(\" \");\n\n\t\t// Print out the last 8 bytes\n\t\tfor(int byte = 8; byte < 16; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\tif(byte != 15){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tif(byte != 15){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n\t\t// Print out the ASCII text\n\t\tsstream << LOG4CXX_STR(\" |\");\n\t\tfor(int byte = 0; byte < 16; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tif(std::isprint(bytes_u8[offset + byte])){\n\t\t\t\tlogchar to_append = bytes_u8[offset + byte];\n\t\t\t\tsstream << to_append;\n\t\t\t}else{\n\t\t\t\tsstream << LOG4CXX_STR(\".\");\n\t\t\t}\n\t\t}\n\t\tsstream << LOG4CXX_STR(\"|\");\n\t}\n\n\tif(flags & HexdumpFlags::AddEndingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\treturn sstream.str();\n}\n", "meta": {"doctype": "codebase", "relative_path": 
"/src/main/cpp/hexdump.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 4, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "8b9d3fd5e54edd6c1e1a2e3e2438b25b71d91203ce0e4fb24dfd618e5c27999d"}}, {"doc_uuid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a", "index": 1, "content": "#include \n/* Prevent error C2491: 'std::numpunct<_Elem>::id': definition of dllimport static data member not allowed */\n#if defined(_MSC_VER) && (LOG4CXX_UNICHAR_API || LOG4CXX_LOGCHAR_IS_UNICHAR)\n#define __FORCE_INSTANCE\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\ntypedef std::basic_stringstream LogStream;\n\nLogString LOG4CXX_NS::hexdump(const void* bytes, uint32_t len, HexdumpFlags flags){\n\tLogString ret;\n\tconst uint8_t* bytes_u8 = static_cast(bytes);\n\tLogStream sstream;\n#if LOG4CXX_LOGCHAR_IS_WCHAR\n\tconst wchar_t fill_char = L'0';\n\tconst wchar_t space_fill_char = L' ';\n#else\n\tconst logchar fill_char = '0';\n\tconst logchar space_fill_char = ' ';\n#endif\n\n", "meta": {"hash_id": "21a86a198a6ffabd5218524e768f6118b477510be6dcb73cd275178197fc058d"}}, {"doc_uuid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a", "index": 2, "content": "\tif(flags & HexdumpFlags::AddStartingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\tfor(uint32_t offset = 0; offset < len; offset += 16){\n\t\tif(offset != 0){\n\t\t\tsstream << LOG4CXX_EOL;\n\t\t}\n\n\t\t// Print out the offset\n\t\tsstream << std::hex << std::setw(8) << std::setfill(fill_char) << offset << std::resetiosflags(std::ios_base::fmtflags(0));\n\n\t\tsstream << std::setw(0) << LOG4CXX_STR(\" \");\n\n\t\t// Print out the first 8 bytes\n\t\tfor(int byte = 0; byte < 8; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\tif(byte != 8){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tsstream << std::setfill(space_fill_char);\n\t\t\tif(byte != 8){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n", "meta": {"hash_id": "5681a6339f18b7d246cd5f88bb81f21f07b3f7444c5617055a16340e61a5511d"}}, {"doc_uuid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a", "index": 3, "content": "\t\tsstream << LOG4CXX_STR(\" \");\n\n\t\t// Print out the last 8 bytes\n\t\tfor(int byte = 8; byte < 16; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tsstream << LOG4CXX_STR(\" 
\");\n\t\t\t\tif(byte != 15){\n\t\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t\t}\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tsstream << std::hex << std::setw(2) << std::setfill(fill_char) << static_cast(bytes_u8[offset + byte]) << std::resetiosflags(std::ios_base::fmtflags(0));\n\t\t\tif(byte != 15){\n\t\t\t\tsstream << LOG4CXX_STR(\" \");\n\t\t\t}\n\t\t}\n\n\t\t// Print out the ASCII text\n\t\tsstream << LOG4CXX_STR(\" |\");\n\t\tfor(int byte = 0; byte < 16; byte++){\n\t\t\tif(offset + byte >= len){\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tif(std::isprint(bytes_u8[offset + byte])){\n\t\t\t\tlogchar to_append = bytes_u8[offset + byte];\n\t\t\t\tsstream << to_append;\n\t\t\t}else{\n\t\t\t\tsstream << LOG4CXX_STR(\".\");\n\t\t\t}\n\t\t}\n\t\tsstream << LOG4CXX_STR(\"|\");\n\t}\n\n\tif(flags & HexdumpFlags::AddEndingNewline){\n\t\tsstream << LOG4CXX_EOL;\n\t}\n\n\treturn sstream.str();\n}\n", "meta": {"hash_id": "5518a7a6be44a9655d2b5d42baae58c05a94e4da2cbcc27ea71c9961caaf8ace"}}]}], "golden_chunks": [{"doc_uuid": "6649ef219cc071b0c0f28a358d32e0ddc8e9e09eeac6707326705f88e286d90a", "index": 1, "content": "#include \n/* Prevent error C2491: 'std::numpunct<_Elem>::id': definition of dllimport static data member not allowed */\n#if defined(_MSC_VER) && (LOG4CXX_UNICHAR_API || LOG4CXX_LOGCHAR_IS_UNICHAR)\n#define __FORCE_INSTANCE\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n\nusing namespace LOG4CXX_NS;\n\ntypedef std::basic_stringstream LogStream;\n\nLogString LOG4CXX_NS::hexdump(const void* bytes, uint32_t len, HexdumpFlags flags){\n\tLogString ret;\n\tconst uint8_t* bytes_u8 = static_cast(bytes);\n\tLogStream sstream;\n#if LOG4CXX_LOGCHAR_IS_WCHAR\n\tconst wchar_t fill_char = L'0';\n\tconst wchar_t space_fill_char = L' ';\n#else\n\tconst logchar fill_char = '0';\n\tconst logchar space_fill_char = ' ';\n#endif\n\n", "meta": {"hash_id": "21a86a198a6ffabd5218524e768f6118b477510be6dcb73cd275178197fc058d"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/cpp/hexdump.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 4, "source": "codebases/jsonl"}} +{"query": "How do you create a DenyAllFilter instance?", "answer": "To create a DenyAllFilter instance, you can use the following code:\n\nFilterPtr filter(new DenyAllFilter());\n\nThis creates a smart pointer (FilterPtr) to a new instance of the DenyAllFilter class.", "golden_doc_uuids": ["b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac"], "golden_chunk_uuids": [["b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", 0]], "golden_documents": [{"uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/filter/denyallfiltertest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\n", "meta": {"hash_id": "01b21d24d2c90baf37295cd3728f7190569fc53cd9e00554d701007ba7bc3e4e"}}, {"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 1, "content": "using namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", "meta": {"hash_id": "f9cc96a519be245dc9d66fc5053712ea44c05499fc01c5e233cfb00013896cde"}}]}], "golden_chunks": [{"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\n", "meta": {"hash_id": "01b21d24d2c90baf37295cd3728f7190569fc53cd9e00554d701007ba7bc3e4e"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/filter/denyallfiltertest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What do you need to do after creating a DenyAllFilter instance?", "answer": "After creating a DenyAllFilter instance, you need to call the activateOptions() method, passing in a Pool object. 
This can be seen in the test case:\n\nPool p;\nfilter->activateOptions(p);\n\nPresumably, this initializes the filter with any necessary options.", "golden_doc_uuids": ["b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac"], "golden_chunk_uuids": [["b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", 1]], "golden_documents": [{"uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/filter/denyallfiltertest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\n", "meta": {"hash_id": "01b21d24d2c90baf37295cd3728f7190569fc53cd9e00554d701007ba7bc3e4e"}}, {"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 1, "content": "using namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", "meta": {"hash_id": "f9cc96a519be245dc9d66fc5053712ea44c05499fc01c5e233cfb00013896cde"}}]}], "golden_chunks": [{"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 1, "content": "using namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", "meta": {"hash_id": "f9cc96a519be245dc9d66fc5053712ea44c05499fc01c5e233cfb00013896cde"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/filter/denyallfiltertest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the signature of the decide() method of the DenyAllFilter?", "answer": "The decide() method of the DenyAllFilter takes a LoggingEventPtr as a parameter and returns a Filter::Decision value. 
This can be inferred from the test case:\n\nLoggingEventPtr event(new LoggingEvent(\n LOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n Level::getInfo(),\n LOG4CXX_STR(\"Hello, World\"),\n LOG4CXX_LOCATION));\n...\nLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n", "golden_doc_uuids": ["b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac"], "golden_chunk_uuids": [["b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", 1]], "golden_documents": [{"uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/filter/denyallfiltertest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\n", "meta": {"hash_id": "01b21d24d2c90baf37295cd3728f7190569fc53cd9e00554d701007ba7bc3e4e"}}, {"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 1, "content": "using namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", "meta": {"hash_id": "f9cc96a519be245dc9d66fc5053712ea44c05499fc01c5e233cfb00013896cde"}}]}], "golden_chunks": [{"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 1, "content": "using namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", "meta": {"hash_id": "f9cc96a519be245dc9d66fc5053712ea44c05499fc01c5e233cfb00013896cde"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/filter/denyallfiltertest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What namespaces are used in this file?", "answer": "This file uses the following namespaces:\n\nusing namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\nThese provide access to various parts of the log4cxx library, including the filter classes, logging events, and helper utilities.", "golden_doc_uuids": ["b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac"], "golden_chunk_uuids": [["b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", 0]], "golden_documents": [{"uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "content": 
"/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\nusing namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/filter/denyallfiltertest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\n", "meta": {"hash_id": "01b21d24d2c90baf37295cd3728f7190569fc53cd9e00554d701007ba7bc3e4e"}}, {"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 1, "content": "using namespace log4cxx;\nusing namespace log4cxx::filter;\nusing namespace log4cxx::spi;\nusing namespace log4cxx::helpers;\n\n\n/**\n * Unit tests for DenyAllFilter.\n */\nLOGUNIT_CLASS(DenyAllFilterTest)\n{\n\tLOGUNIT_TEST_SUITE(DenyAllFilterTest);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\t/**\n\t * Check that DenyAllFilter.decide() returns Filter.DENY.\n\t */\n\tvoid test1()\n\t{\n\t\tLoggingEventPtr event(new LoggingEvent(\n\t\t\t\tLOG4CXX_STR(\"org.apache.log4j.filter.DenyAllFilterTest\"),\n\t\t\t\tLevel::getInfo(),\n\t\t\t\tLOG4CXX_STR(\"Hello, World\"),\n\t\t\t\tLOG4CXX_LOCATION));\n\t\tFilterPtr filter(new DenyAllFilter());\n\t\tPool p;\n\t\tfilter->activateOptions(p);\n\t\tLOGUNIT_ASSERT_EQUAL(Filter::DENY, filter->decide(event));\n\t}\n\n};\n\nLOGUNIT_TEST_SUITE_REGISTRATION(DenyAllFilterTest);\n\n\n", "meta": {"hash_id": "f9cc96a519be245dc9d66fc5053712ea44c05499fc01c5e233cfb00013896cde"}}]}], "golden_chunks": [{"doc_uuid": "b06cf4af0fab3962648faef939278c1e4865a64d3a8971230968e55e178a58ac", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \n#include \n#include \n#include \n#include \"../logunit.h\"\n\n", "meta": {"hash_id": "01b21d24d2c90baf37295cd3728f7190569fc53cd9e00554d701007ba7bc3e4e"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/filter/denyallfiltertest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the MinimumTestCase class?", "answer": "The MinimumTestCase class is a test suite that contains test cases for testing the minimum functionality of the log4cxx logging library. 
This can be inferred from the class name and the LOGUNIT_TEST_SUITE and LOGUNIT_TEST macros used to define the test cases.", "golden_doc_uuids": ["e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76"], "golden_chunk_uuids": [["e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", 1]], "golden_documents": [{"uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \n#include \n#include \n#include \n\n#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/threadfilter.h\"\n#include \n#include \n#include \n#include \n#include \"testchar.h\"\n#include \n#include \n#include \n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(MinimumTestCase)\n{\n\tLOGUNIT_TEST_SUITE(MinimumTestCase);\n\tLOGUNIT_TEST(simple);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\troot->removeAllAppenders();\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\tvoid simple()\n\t{\n\t\tLayoutPtr layout = LayoutPtr(new SimpleLayout());\n\t\tAppenderPtr appender = FileAppenderPtr(new FileAppender(layout, LOG4CXX_STR(\"output/simple\"), false));\n\t\troot->addAppender(appender);\n\t\tcommon();\n\n\t\tLOGUNIT_ASSERT(Compare::compare(LOG4CXX_FILE(\"output/simple\"), LOG4CXX_FILE(\"witness/simple\")));\n\t}\n\n\tstd::string createMessage(int i, Pool & pool)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = 0;\n\n\t\t// In the lines below, the logger names are chosen as an aid in\n\t\t// remembering their level values. 
In general, the logger names\n\t\t// have no bearing to level values.\n\t\tLoggerPtr ERRlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"ERR\"));\n\t\tERRlogger->setLevel(Level::getError());\n\n\t\tLoggerPtr INF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF\"));\n\t\tINF->setLevel(Level::getInfo());\n\n\t\tLoggerPtr INF_ERR = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR\"));\n\t\tINF_ERR->setLevel(Level::getError());\n\n\t\tLoggerPtr DEB = Logger::getLogger(LOG4CXX_TEST_STR(\"DEB\"));\n\t\tDEB->setLevel(Level::getDebug());\n\n\t\t// Note: categories with undefined level\n\t\tLoggerPtr INF_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.UNDEF\"));\n\t\tLoggerPtr INF_ERR_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR.UNDEF\"));\n\t\tLoggerPtr UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"UNDEF\"));\n\n\t\tstd::string msg(\"Message \");\n\n\t\tPool pool;\n\n\t\t// These should all log.----------------------------\n\t\tLOG4CXX_FATAL(ERRlogger, createMessage(i, pool));\n\t\ti++; //0\n\t\tLOG4CXX_ERROR(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF, createMessage(i, pool));\n\t\ti++; // 2\n\t\tLOG4CXX_ERROR(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_UNDEF, createMessage(i, pool));\n\t\ti++; //6\n\t\tLOG4CXX_ERROR(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR, createMessage(i, pool));\n\t\ti++; // 10\n\t\tLOG4CXX_ERROR(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_ERROR(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(DEB, createMessage(i, pool));\n\t\ti++; //14\n\t\tLOG4CXX_ERROR(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(DEB, createMessage(i, pool));\n\t\ti++;\n\n\t\t// defaultLevel=DEBUG\n\t\tLOG4CXX_FATAL(UNDEF, createMessage(i, pool));\n\t\ti++; // 19\n\t\tLOG4CXX_ERROR(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\t// The following should not log\n\t\tLOG4CXX_WARN(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_DEBUG(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_WARN(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\tLOG4CXX_INFO(INF, LOG4CXX_TEST_STR(\"Messages should bear numbers 0 through 23.\"));\n\t}\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\nprivate:\n\tstatic const File FILTERED;\n};\n\n\nconst File 
MinimumTestCase::FILTERED(\"output/minimumfiltered\");\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(MinimumTestCase);\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/minimumtestcase.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \n#include \n#include \n#include \n\n", "meta": {"hash_id": "d2c36e2e91283b58913a3ad307c210934fac589aa26f88ecf471b234a35012dc"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 1, "content": "#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/threadfilter.h\"\n#include \n#include \n#include \n#include \n#include \"testchar.h\"\n#include \n#include \n#include \n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(MinimumTestCase)\n{\n\tLOGUNIT_TEST_SUITE(MinimumTestCase);\n\tLOGUNIT_TEST(simple);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\troot->removeAllAppenders();\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n", "meta": {"hash_id": "62aa50a5bfac787515eda989b06c67c922a44765b8b411d9ce4adec178205f76"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 2, "content": "\tvoid simple()\n\t{\n\t\tLayoutPtr layout = LayoutPtr(new SimpleLayout());\n\t\tAppenderPtr appender = FileAppenderPtr(new FileAppender(layout, LOG4CXX_STR(\"output/simple\"), false));\n\t\troot->addAppender(appender);\n\t\tcommon();\n\n\t\tLOGUNIT_ASSERT(Compare::compare(LOG4CXX_FILE(\"output/simple\"), LOG4CXX_FILE(\"witness/simple\")));\n\t}\n\n\tstd::string createMessage(int i, Pool & pool)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = 0;\n\n\t\t// In the lines below, the logger names are chosen as an aid in\n\t\t// remembering their level values. 
In general, the logger names\n\t\t// have no bearing to level values.\n\t\tLoggerPtr ERRlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"ERR\"));\n\t\tERRlogger->setLevel(Level::getError());\n\n", "meta": {"hash_id": "cd02e8e6bd721bdc9f4f6c1064486564fa92d83ec6a53ab6d032d495a27b9434"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 3, "content": "\t\tLoggerPtr INF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF\"));\n\t\tINF->setLevel(Level::getInfo());\n\n\t\tLoggerPtr INF_ERR = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR\"));\n\t\tINF_ERR->setLevel(Level::getError());\n\n\t\tLoggerPtr DEB = Logger::getLogger(LOG4CXX_TEST_STR(\"DEB\"));\n\t\tDEB->setLevel(Level::getDebug());\n\n\t\t// Note: categories with undefined level\n\t\tLoggerPtr INF_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.UNDEF\"));\n\t\tLoggerPtr INF_ERR_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR.UNDEF\"));\n\t\tLoggerPtr UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"UNDEF\"));\n\n", "meta": {"hash_id": "d62a480f742cdc458eec47f896f274a7766f820b684cf6b5b4655b4fd9404c0b"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 4, "content": "\t\tstd::string msg(\"Message \");\n\n\t\tPool pool;\n\n\t\t// These should all log.----------------------------\n\t\tLOG4CXX_FATAL(ERRlogger, createMessage(i, pool));\n\t\ti++; //0\n\t\tLOG4CXX_ERROR(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF, createMessage(i, pool));\n\t\ti++; // 2\n\t\tLOG4CXX_ERROR(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_UNDEF, createMessage(i, pool));\n\t\ti++; //6\n\t\tLOG4CXX_ERROR(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR, createMessage(i, pool));\n\t\ti++; // 10\n\t\tLOG4CXX_ERROR(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_ERROR(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n", "meta": {"hash_id": "0643e27dfa25a080ad6a4bbe039b01d7ddad4efc7b1cd12ef714056035f5305e"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 5, "content": "\t\tLOG4CXX_FATAL(DEB, createMessage(i, pool));\n\t\ti++; //14\n\t\tLOG4CXX_ERROR(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(DEB, createMessage(i, pool));\n\t\ti++;\n\n\t\t// defaultLevel=DEBUG\n\t\tLOG4CXX_FATAL(UNDEF, createMessage(i, pool));\n\t\ti++; // 19\n\t\tLOG4CXX_ERROR(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\t// The following should not log\n\t\tLOG4CXX_WARN(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_DEBUG(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n", "meta": {"hash_id": 
"75f778ab13182174e87626bb5fa903b8d0b39484f2cf15d4e07b5ac87466c6cd"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 6, "content": "\t\tLOG4CXX_WARN(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\tLOG4CXX_INFO(INF, LOG4CXX_TEST_STR(\"Messages should bear numbers 0 through 23.\"));\n\t}\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\nprivate:\n\tstatic const File FILTERED;\n};\n\n\nconst File MinimumTestCase::FILTERED(\"output/minimumfiltered\");\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(MinimumTestCase);\n", "meta": {"hash_id": "5a7a750026bec5510142ad6866af3fdbbca09adb729612c41a95f122394a8431"}}]}], "golden_chunks": [{"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 1, "content": "#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/threadfilter.h\"\n#include \n#include \n#include \n#include \n#include \"testchar.h\"\n#include \n#include \n#include \n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(MinimumTestCase)\n{\n\tLOGUNIT_TEST_SUITE(MinimumTestCase);\n\tLOGUNIT_TEST(simple);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\troot->removeAllAppenders();\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n", "meta": {"hash_id": "62aa50a5bfac787515eda989b06c67c922a44765b8b411d9ce4adec178205f76"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/minimumtestcase.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the common() method?", "answer": "The common() method contains a series of logging statements at different levels (FATAL, ERROR, WARN, INFO, DEBUG) using loggers with different level configurations. It is used to test the behavior of the logging framework under various logging scenarios. The method logs messages with incrementing numbers and checks which messages are actually logged based on the logger configurations.", "golden_doc_uuids": ["e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76"], "golden_chunk_uuids": [["e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", 3], ["e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", 2]], "golden_documents": [{"uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \n#include \n#include \n#include \n\n#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/threadfilter.h\"\n#include \n#include \n#include \n#include \n#include \"testchar.h\"\n#include \n#include \n#include \n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(MinimumTestCase)\n{\n\tLOGUNIT_TEST_SUITE(MinimumTestCase);\n\tLOGUNIT_TEST(simple);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\troot->removeAllAppenders();\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\tvoid simple()\n\t{\n\t\tLayoutPtr layout = LayoutPtr(new SimpleLayout());\n\t\tAppenderPtr appender = FileAppenderPtr(new FileAppender(layout, LOG4CXX_STR(\"output/simple\"), false));\n\t\troot->addAppender(appender);\n\t\tcommon();\n\n\t\tLOGUNIT_ASSERT(Compare::compare(LOG4CXX_FILE(\"output/simple\"), LOG4CXX_FILE(\"witness/simple\")));\n\t}\n\n\tstd::string createMessage(int i, Pool & pool)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = 0;\n\n\t\t// In the lines below, the logger names are chosen as an aid in\n\t\t// remembering their level values. 
In general, the logger names\n\t\t// have no bearing to level values.\n\t\tLoggerPtr ERRlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"ERR\"));\n\t\tERRlogger->setLevel(Level::getError());\n\n\t\tLoggerPtr INF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF\"));\n\t\tINF->setLevel(Level::getInfo());\n\n\t\tLoggerPtr INF_ERR = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR\"));\n\t\tINF_ERR->setLevel(Level::getError());\n\n\t\tLoggerPtr DEB = Logger::getLogger(LOG4CXX_TEST_STR(\"DEB\"));\n\t\tDEB->setLevel(Level::getDebug());\n\n\t\t// Note: categories with undefined level\n\t\tLoggerPtr INF_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.UNDEF\"));\n\t\tLoggerPtr INF_ERR_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR.UNDEF\"));\n\t\tLoggerPtr UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"UNDEF\"));\n\n\t\tstd::string msg(\"Message \");\n\n\t\tPool pool;\n\n\t\t// These should all log.----------------------------\n\t\tLOG4CXX_FATAL(ERRlogger, createMessage(i, pool));\n\t\ti++; //0\n\t\tLOG4CXX_ERROR(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF, createMessage(i, pool));\n\t\ti++; // 2\n\t\tLOG4CXX_ERROR(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_UNDEF, createMessage(i, pool));\n\t\ti++; //6\n\t\tLOG4CXX_ERROR(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR, createMessage(i, pool));\n\t\ti++; // 10\n\t\tLOG4CXX_ERROR(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_ERROR(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(DEB, createMessage(i, pool));\n\t\ti++; //14\n\t\tLOG4CXX_ERROR(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(DEB, createMessage(i, pool));\n\t\ti++;\n\n\t\t// defaultLevel=DEBUG\n\t\tLOG4CXX_FATAL(UNDEF, createMessage(i, pool));\n\t\ti++; // 19\n\t\tLOG4CXX_ERROR(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\t// The following should not log\n\t\tLOG4CXX_WARN(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_DEBUG(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_WARN(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\tLOG4CXX_INFO(INF, LOG4CXX_TEST_STR(\"Messages should bear numbers 0 through 23.\"));\n\t}\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\nprivate:\n\tstatic const File FILTERED;\n};\n\n\nconst File 
MinimumTestCase::FILTERED(\"output/minimumfiltered\");\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(MinimumTestCase);\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/minimumtestcase.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 7, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \n#include \n#include \n#include \n\n", "meta": {"hash_id": "d2c36e2e91283b58913a3ad307c210934fac589aa26f88ecf471b234a35012dc"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 1, "content": "#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/threadfilter.h\"\n#include \n#include \n#include \n#include \n#include \"testchar.h\"\n#include \n#include \n#include \n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(MinimumTestCase)\n{\n\tLOGUNIT_TEST_SUITE(MinimumTestCase);\n\tLOGUNIT_TEST(simple);\n\tLOGUNIT_TEST_SUITE_END();\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\troot->removeAllAppenders();\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n", "meta": {"hash_id": "62aa50a5bfac787515eda989b06c67c922a44765b8b411d9ce4adec178205f76"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 2, "content": "\tvoid simple()\n\t{\n\t\tLayoutPtr layout = LayoutPtr(new SimpleLayout());\n\t\tAppenderPtr appender = FileAppenderPtr(new FileAppender(layout, LOG4CXX_STR(\"output/simple\"), false));\n\t\troot->addAppender(appender);\n\t\tcommon();\n\n\t\tLOGUNIT_ASSERT(Compare::compare(LOG4CXX_FILE(\"output/simple\"), LOG4CXX_FILE(\"witness/simple\")));\n\t}\n\n\tstd::string createMessage(int i, Pool & pool)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = 0;\n\n\t\t// In the lines below, the logger names are chosen as an aid in\n\t\t// remembering their level values. 
In general, the logger names\n\t\t// have no bearing to level values.\n\t\tLoggerPtr ERRlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"ERR\"));\n\t\tERRlogger->setLevel(Level::getError());\n\n", "meta": {"hash_id": "cd02e8e6bd721bdc9f4f6c1064486564fa92d83ec6a53ab6d032d495a27b9434"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 3, "content": "\t\tLoggerPtr INF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF\"));\n\t\tINF->setLevel(Level::getInfo());\n\n\t\tLoggerPtr INF_ERR = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR\"));\n\t\tINF_ERR->setLevel(Level::getError());\n\n\t\tLoggerPtr DEB = Logger::getLogger(LOG4CXX_TEST_STR(\"DEB\"));\n\t\tDEB->setLevel(Level::getDebug());\n\n\t\t// Note: categories with undefined level\n\t\tLoggerPtr INF_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.UNDEF\"));\n\t\tLoggerPtr INF_ERR_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR.UNDEF\"));\n\t\tLoggerPtr UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"UNDEF\"));\n\n", "meta": {"hash_id": "d62a480f742cdc458eec47f896f274a7766f820b684cf6b5b4655b4fd9404c0b"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 4, "content": "\t\tstd::string msg(\"Message \");\n\n\t\tPool pool;\n\n\t\t// These should all log.----------------------------\n\t\tLOG4CXX_FATAL(ERRlogger, createMessage(i, pool));\n\t\ti++; //0\n\t\tLOG4CXX_ERROR(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF, createMessage(i, pool));\n\t\ti++; // 2\n\t\tLOG4CXX_ERROR(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_UNDEF, createMessage(i, pool));\n\t\ti++; //6\n\t\tLOG4CXX_ERROR(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR, createMessage(i, pool));\n\t\ti++; // 10\n\t\tLOG4CXX_ERROR(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_FATAL(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_ERROR(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n", "meta": {"hash_id": "0643e27dfa25a080ad6a4bbe039b01d7ddad4efc7b1cd12ef714056035f5305e"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 5, "content": "\t\tLOG4CXX_FATAL(DEB, createMessage(i, pool));\n\t\ti++; //14\n\t\tLOG4CXX_ERROR(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(DEB, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(DEB, createMessage(i, pool));\n\t\ti++;\n\n\t\t// defaultLevel=DEBUG\n\t\tLOG4CXX_FATAL(UNDEF, createMessage(i, pool));\n\t\ti++; // 19\n\t\tLOG4CXX_ERROR(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\t// The following should not log\n\t\tLOG4CXX_WARN(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(ERRlogger, createMessage(i, pool));\n\t\ti++;\n\n\t\tLOG4CXX_DEBUG(INF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n", "meta": {"hash_id": 
"75f778ab13182174e87626bb5fa903b8d0b39484f2cf15d4e07b5ac87466c6cd"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 6, "content": "\t\tLOG4CXX_WARN(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_WARN(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_INFO(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\t\tLOG4CXX_DEBUG(INF_ERR_UNDEF, createMessage(i, pool));\n\t\ti++;\n\n\t\t// -------------------------------------------------\n\t\tLOG4CXX_INFO(INF, LOG4CXX_TEST_STR(\"Messages should bear numbers 0 through 23.\"));\n\t}\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\nprivate:\n\tstatic const File FILTERED;\n};\n\n\nconst File MinimumTestCase::FILTERED(\"output/minimumfiltered\");\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(MinimumTestCase);\n", "meta": {"hash_id": "5a7a750026bec5510142ad6866af3fdbbca09adb729612c41a95f122394a8431"}}]}], "golden_chunks": [{"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 3, "content": "\t\tLoggerPtr INF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF\"));\n\t\tINF->setLevel(Level::getInfo());\n\n\t\tLoggerPtr INF_ERR = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR\"));\n\t\tINF_ERR->setLevel(Level::getError());\n\n\t\tLoggerPtr DEB = Logger::getLogger(LOG4CXX_TEST_STR(\"DEB\"));\n\t\tDEB->setLevel(Level::getDebug());\n\n\t\t// Note: categories with undefined level\n\t\tLoggerPtr INF_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.UNDEF\"));\n\t\tLoggerPtr INF_ERR_UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"INF.ERR.UNDEF\"));\n\t\tLoggerPtr UNDEF = Logger::getLogger(LOG4CXX_TEST_STR(\"UNDEF\"));\n\n", "meta": {"hash_id": "d62a480f742cdc458eec47f896f274a7766f820b684cf6b5b4655b4fd9404c0b"}}, {"doc_uuid": "e884e6a9a3f013496b4c494449285d20f4832216ad5ed955e0189018361d8c76", "index": 2, "content": "\tvoid simple()\n\t{\n\t\tLayoutPtr layout = LayoutPtr(new SimpleLayout());\n\t\tAppenderPtr appender = FileAppenderPtr(new FileAppender(layout, LOG4CXX_STR(\"output/simple\"), false));\n\t\troot->addAppender(appender);\n\t\tcommon();\n\n\t\tLOGUNIT_ASSERT(Compare::compare(LOG4CXX_FILE(\"output/simple\"), LOG4CXX_FILE(\"witness/simple\")));\n\t}\n\n\tstd::string createMessage(int i, Pool & pool)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = 0;\n\n\t\t// In the lines below, the logger names are chosen as an aid in\n\t\t// remembering their level values. 
In general, the logger names\n\t\t// have no bearing to level values.\n\t\tLoggerPtr ERRlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"ERR\"));\n\t\tERRlogger->setLevel(Level::getError());\n\n", "meta": {"hash_id": "cd02e8e6bd721bdc9f4f6c1064486564fa92d83ec6a53ab6d032d495a27b9434"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/minimumtestcase.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 7, "source": "codebases/jsonl"}} +{"query": "How do you obtain an instance of NDCPatternConverter?", "answer": "An instance of NDCPatternConverter can be obtained using the static newInstance method, which takes a vector of options as a parameter:\n/**\n * Obtains an instance of NDCPatternConverter.\n * @param options options, may be null.\n * @return instance of NDCPatternConverter.\n */\nstatic PatternConverterPtr newInstance(\n const std::vector& options);", "golden_doc_uuids": ["0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b"], "golden_chunk_uuids": [["0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", 1]], "golden_documents": [{"uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/pattern/ndcpatternconverter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "5335428ec0d29fd674bd618036c8e990256262dec3ff1eb39766e67dbbb0d6e5"}}, {"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 1, "content": "#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"hash_id": "98f1483b41450cb7902f43402fc2ea6920078e47a39605759ad6581abc983995"}}]}], "golden_chunks": [{"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 1, "content": "#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"hash_id": "98f1483b41450cb7902f43402fc2ea6920078e47a39605759ad6581abc983995"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/pattern/ndcpatternconverter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How does the NDCPatternConverter format the logging event?", "answer": "The NDCPatternConverter formats the 
logging event by overriding the format method from its base class LoggingEventPatternConverter:\nvoid format(const spi::LoggingEventPtr& event,\n LogString& toAppendTo,\n helpers::Pool& p) const override;\nThis method takes the logging event, a LogString to append the formatted result to, and a Pool object for memory management.", "golden_doc_uuids": ["0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b"], "golden_chunk_uuids": [["0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", 1]], "golden_documents": [{"uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/pattern/ndcpatternconverter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "5335428ec0d29fd674bd618036c8e990256262dec3ff1eb39766e67dbbb0d6e5"}}, {"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 1, "content": "#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"hash_id": "98f1483b41450cb7902f43402fc2ea6920078e47a39605759ad6581abc983995"}}]}], "golden_chunks": [{"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 1, "content": "#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"hash_id": "98f1483b41450cb7902f43402fc2ea6920078e47a39605759ad6581abc983995"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/pattern/ndcpatternconverter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What is the inheritance hierarchy of NDCPatternConverter?", "answer": "NDCPatternConverter inherits from the LoggingEventPatternConverter class, as shown in the class declaration:\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter", "golden_doc_uuids": ["0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b"], "golden_chunk_uuids": 
[["0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", 1]], "golden_documents": [{"uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/pattern/ndcpatternconverter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "5335428ec0d29fd674bd618036c8e990256262dec3ff1eb39766e67dbbb0d6e5"}}, {"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 1, "content": "#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"hash_id": "98f1483b41450cb7902f43402fc2ea6920078e47a39605759ad6581abc983995"}}]}], "golden_chunks": [{"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 1, "content": "#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"hash_id": "98f1483b41450cb7902f43402fc2ea6920078e47a39605759ad6581abc983995"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/pattern/ndcpatternconverter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "What macros are used in the NDCPatternConverter class declaration?", "answer": "The NDCPatternConverter class uses the following macros:\n- LOG4CXX_EXPORT: Used for exporting the class in a shared library.\n- DECLARE_LOG4CXX_PATTERN(NDCPatternConverter): Declares the NDCPatternConverter pattern.\n- BEGIN_LOG4CXX_CAST_MAP() and END_LOG4CXX_CAST_MAP(): Used for defining a cast map for the 
class.\n- LOG4CXX_CAST_ENTRY(NDCPatternConverter) and LOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter): Used for specifying the cast entries in the cast map.", "golden_doc_uuids": ["0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b"], "golden_chunk_uuids": [["0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", 1]], "golden_documents": [{"uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/pattern/ndcpatternconverter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "5335428ec0d29fd674bd618036c8e990256262dec3ff1eb39766e67dbbb0d6e5"}}, {"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 1, "content": "#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"hash_id": "98f1483b41450cb7902f43402fc2ea6920078e47a39605759ad6581abc983995"}}]}], "golden_chunks": [{"doc_uuid": "0726f860e5f7359166e66f5ae80e27f5d0429fbb33e4f4d5ed514d30192e622b", "index": 1, "content": "#ifndef _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n#define _LOG4CXX_PATTERN_NDC_PATTERN_CONVERTER\n\n#include \n\nnamespace LOG4CXX_NS\n{\nnamespace pattern\n{\n\n\n/**\n * Return the event's NDC in a StringBuffer.\n *\n *\n *\n */\nclass LOG4CXX_EXPORT NDCPatternConverter : public LoggingEventPatternConverter\n{\n\tpublic:\n\t\tDECLARE_LOG4CXX_PATTERN(NDCPatternConverter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(NDCPatternConverter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(LoggingEventPatternConverter)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tNDCPatternConverter();\n\n\t\t/**\n\t\t * Obtains an instance of NDCPatternConverter.\n\t\t * @param options options, may be null.\n\t\t * @return instance of NDCPatternConverter.\n\t\t */\n\t\tstatic PatternConverterPtr newInstance(\n\t\t\tconst std::vector& options);\n\n\t\tusing LoggingEventPatternConverter::format;\n\n\t\tvoid format(const spi::LoggingEventPtr& event,\n\t\t\tLogString& toAppendTo,\n\t\t\thelpers::Pool& p) const override;\n};\n}\n}\n#endif\n", "meta": {"hash_id": "98f1483b41450cb7902f43402fc2ea6920078e47a39605759ad6581abc983995"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/pattern/ndcpatternconverter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 2, "source": "codebases/jsonl"}} +{"query": "How does the test1() method test the FMTLayout?", "answer": "The test1() method first configures log4cxx using a properties file \"input/fmtLayout1.properties\". It then calls the common() method which logs messages at different levels (DEBUG, INFO, WARN, ERROR, FATAL) to the root logger and a logger named \"java.org.apache.log4j.PatternLayoutTest\". 
Finally, it compares the generated output file \"output/fmtlayout\" against an expected output file \"witness/patternLayout.1\" using the Compare::compare() method.", "golden_doc_uuids": ["fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733"], "golden_chunk_uuids": [["fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", 3], ["fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", 2], ["fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", 0]], "golden_documents": [{"uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \"testchar.h\"\n#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/iso8601filter.h\"\n#include \"util/absolutetimefilter.h\"\n#include \"util/relativetimefilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/threadfilter.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/filenamefilter.h\"\n#include \"vectorappender.h\"\n#include \n#include \n#include \n#include \n#include \n#include \n\n#define REGEX_STR(x) x\n#define PAT0 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* - Message [0-9]\\\\{1,2\\\\}\")\n#define PAT1 ISO8601_PAT REGEX_STR(\" \") PAT0\n#define PAT2 ABSOLUTE_DATE_AND_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT3 ABSOLUTE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT4 RELATIVE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT5 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* : Message [0-9]\\\\{1,2\\\\}\")\n#define PAT6 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) .*patternlayouttest.cpp\\\\([0-9]\\\\{1,4\\\\}\\\\): Message [0-9]\\\\{1,3\\\\}\")\n#define PAT11a REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ log4j.PatternLayoutTest: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT11b REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ root: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT12 REGEX_STR(\"^\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) \")\\\n\tREGEX_STR(\".*patternlayouttest.cpp([0-9]\\\\{1,4\\\\}): \")\\\n\tREGEX_STR(\"Message [0-9]\\\\{1,2\\\\}\")\n#define PAT_MDC_1 REGEX_STR(\"\")\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(FMTTestCase)\n{\n\tLOGUNIT_TEST_SUITE(FMTTestCase);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST(test1_expanded);\n\tLOGUNIT_TEST(test10);\n//\tLOGUNIT_TEST(test_date);\n\tLOGUNIT_TEST_SUITE_END();\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\tMDC::clear();\n\t\tlogger = 
Logger::getLogger(LOG4CXX_TEST_STR(\"java.org.apache.log4j.PatternLayoutTest\"));\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tMDC::clear();\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\tvoid test1()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\tvoid test1_expanded()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1_expanded.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\tvoid test10()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout10.properties\"));\n\t\tcommon();\n\n\t\tControlFilter filter1;\n\t\tfilter1 << PAT6;\n\t\tThreadFilter filter2;\n\t\tLineNumberFilter filter3;\n\t\tFilenameFilter filenameFilter(__FILE__, \"patternlayouttest.cpp\");\n\n\n\t\tstd::vector filters;\n\t\tfilters.push_back(&filenameFilter);\n\t\tfilters.push_back(&filter1);\n\t\tfilters.push_back(&filter2);\n\t\tfilters.push_back(&filter3);\n\n\n\t\ttry\n\t\t{\n\t\t\tTransformer::transform(TEMP, FILTERED, filters);\n\t\t}\n\t\tcatch (UnexpectedFormatException& e)\n\t\t{\n\t\t\tstd::cout << \"UnexpectedFormatException :\" << e.what() << std::endl;\n\t\t\tthrow;\n\t\t}\n\n\t\tLOGUNIT_ASSERT(Compare::compare(FILTERED, LOG4CXX_FILE(\"witness/patternLayout.10\")));\n\t}\n\n\tvoid test_date(){\n\t\tstd::tm tm = {};\n\t\tstd::stringstream ss(\"2013-04-11 08:35:34\");\n\t\tss >> std::get_time(&tm, \"%Y-%m-%d %H:%M:%S\");\n\t\tauto tp = std::chrono::system_clock::from_time_t(std::mktime(&tm));\n\t\tuint64_t micros = std::chrono::duration_cast(tp.time_since_epoch()).count();\n\n\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction([micros](){\n\t\t\treturn micros;\n\t\t});\n\n\t\tlog4cxx::spi::LoggingEventPtr logEvt = std::make_shared(LOG4CXX_STR(\"foo\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t Level::getInfo(),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t LOG4CXX_STR(\"A Message\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t log4cxx::spi::LocationInfo::getLocationUnavailable());\n\t\tFMTLayout layout(LOG4CXX_STR(\"{d:%Y-%m-%d %H:%M:%S} {message}\"));\n\t\tLogString output;\n\t\tlog4cxx::helpers::Pool pool;\n\t\tlayout.format( output, logEvt, pool);\n\n\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction(nullptr);\n\n\t\tLOGUNIT_ASSERT_EQUAL(LOG4CXX_STR(\"2013-04-11 09:35:34 A Message\"), output);\n\t}\n\n\tstd::string createMessage(Pool & pool, int i)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = -1;\n\n\t\tPool pool;\n\n\n\t\tLOG4CXX_DEBUG(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_DEBUG(root, createMessage(pool, i));\n\n\t\tLOG4CXX_INFO(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_INFO(root, createMessage(pool, i));\n\n\t\tLOG4CXX_WARN(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_WARN(root, createMessage(pool, i));\n\n\t\tLOG4CXX_ERROR(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_ERROR(root, createMessage(pool, i));\n\n\t\tLOG4CXX_FATAL(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_FATAL(root, createMessage(pool, i));\n\t}\n\n\tprivate:\n\t\tstatic const LogString FILTERED;\n\t\tstatic const LogString TEMP;\n\n};\n\nconst LogString FMTTestCase::TEMP(LOG4CXX_STR(\"output/fmtlayout\"));\nconst LogString 
FMTTestCase::FILTERED(LOG4CXX_STR(\"output/fmtlayoutfiltered\"));\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(FMTTestCase);\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/fmttest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \"testchar.h\"\n", "meta": {"hash_id": "e9c56d29140035b357c7780c1330d82be00fdcf534b9473e69b314f72f9f9c2d"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 1, "content": "#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/iso8601filter.h\"\n#include \"util/absolutetimefilter.h\"\n#include \"util/relativetimefilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/threadfilter.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/filenamefilter.h\"\n#include \"vectorappender.h\"\n#include \n#include \n#include \n#include \n#include \n#include \n\n#define REGEX_STR(x) x\n#define PAT0 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* - Message [0-9]\\\\{1,2\\\\}\")\n#define PAT1 ISO8601_PAT REGEX_STR(\" \") PAT0\n#define PAT2 ABSOLUTE_DATE_AND_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT3 ABSOLUTE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT4 RELATIVE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT5 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* : Message [0-9]\\\\{1,2\\\\}\")\n", "meta": {"hash_id": "bd13fc5e0f78c0b35ae2fc5b16c9c63cef18abd87a1eeddef5735eab7400b410"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 2, "content": "#define PAT6 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) .*patternlayouttest.cpp\\\\([0-9]\\\\{1,4\\\\}\\\\): Message [0-9]\\\\{1,3\\\\}\")\n#define PAT11a REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ log4j.PatternLayoutTest: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT11b REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ root: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT12 REGEX_STR(\"^\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) \")\\\n\tREGEX_STR(\".*patternlayouttest.cpp([0-9]\\\\{1,4\\\\}): \")\\\n\tREGEX_STR(\"Message [0-9]\\\\{1,2\\\\}\")\n#define PAT_MDC_1 REGEX_STR(\"\")\n\n", "meta": {"hash_id": "90c095f16bb7b73c8138226c8cb137fcfe3e29c982c3ae575338c4f1cd5c2811"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 3, "content": "using namespace log4cxx;\nusing namespace 
log4cxx::helpers;\n\nLOGUNIT_CLASS(FMTTestCase)\n{\n\tLOGUNIT_TEST_SUITE(FMTTestCase);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST(test1_expanded);\n\tLOGUNIT_TEST(test10);\n//\tLOGUNIT_TEST(test_date);\n\tLOGUNIT_TEST_SUITE_END();\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\tMDC::clear();\n\t\tlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"java.org.apache.log4j.PatternLayoutTest\"));\n\t}\n\n", "meta": {"hash_id": "3edd1a75f5420609854ef20d4cef9e3774d564e063d5aab001dd23c0efc724b2"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 4, "content": "\tvoid tearDown()\n\t{\n\t\tMDC::clear();\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\tvoid test1()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\tvoid test1_expanded()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1_expanded.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n", "meta": {"hash_id": "761c753883136ab3f7ad8e593541b479dcd4883222bf1903e785890589f20d10"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 5, "content": "\tvoid test10()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout10.properties\"));\n\t\tcommon();\n\n\t\tControlFilter filter1;\n\t\tfilter1 << PAT6;\n\t\tThreadFilter filter2;\n\t\tLineNumberFilter filter3;\n\t\tFilenameFilter filenameFilter(__FILE__, \"patternlayouttest.cpp\");\n\n\n\t\tstd::vector filters;\n\t\tfilters.push_back(&filenameFilter);\n\t\tfilters.push_back(&filter1);\n\t\tfilters.push_back(&filter2);\n\t\tfilters.push_back(&filter3);\n\n", "meta": {"hash_id": "b4c83384fef5ab4ea970cafb82f12e69f0fcb53f4e88eff722bff7f88881dab3"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 6, "content": "\n\t\ttry\n\t\t{\n\t\t\tTransformer::transform(TEMP, FILTERED, filters);\n\t\t}\n\t\tcatch (UnexpectedFormatException& e)\n\t\t{\n\t\t\tstd::cout << \"UnexpectedFormatException :\" << e.what() << std::endl;\n\t\t\tthrow;\n\t\t}\n\n\t\tLOGUNIT_ASSERT(Compare::compare(FILTERED, LOG4CXX_FILE(\"witness/patternLayout.10\")));\n\t}\n\n\tvoid test_date(){\n\t\tstd::tm tm = {};\n\t\tstd::stringstream ss(\"2013-04-11 08:35:34\");\n\t\tss >> std::get_time(&tm, \"%Y-%m-%d %H:%M:%S\");\n\t\tauto tp = std::chrono::system_clock::from_time_t(std::mktime(&tm));\n\t\tuint64_t micros = std::chrono::duration_cast(tp.time_since_epoch()).count();\n\n\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction([micros](){\n\t\t\treturn micros;\n\t\t});\n\n", "meta": {"hash_id": "c46aaae84f6b9715870e81d032143af14414958268efd0c4869894170ef01f66"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 7, "content": "\t\tlog4cxx::spi::LoggingEventPtr logEvt = std::make_shared(LOG4CXX_STR(\"foo\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t Level::getInfo(),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t LOG4CXX_STR(\"A Message\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t log4cxx::spi::LocationInfo::getLocationUnavailable());\n\t\tFMTLayout layout(LOG4CXX_STR(\"{d:%Y-%m-%d %H:%M:%S} {message}\"));\n\t\tLogString output;\n\t\tlog4cxx::helpers::Pool 
pool;\n\t\tlayout.format( output, logEvt, pool);\n\n", "meta": {"hash_id": "86cabe576af5d9a6ad2edf507c6489ccd437823eaa40dc779c901fa6c4e9d4a2"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 8, "content": "\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction(nullptr);\n\n\t\tLOGUNIT_ASSERT_EQUAL(LOG4CXX_STR(\"2013-04-11 09:35:34 A Message\"), output);\n\t}\n\n\tstd::string createMessage(Pool & pool, int i)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = -1;\n\n\t\tPool pool;\n\n\n\t\tLOG4CXX_DEBUG(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_DEBUG(root, createMessage(pool, i));\n\n\t\tLOG4CXX_INFO(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_INFO(root, createMessage(pool, i));\n\n\t\tLOG4CXX_WARN(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_WARN(root, createMessage(pool, i));\n\n", "meta": {"hash_id": "e41054e5960beb7dfc12deda1825d4fe2555b70b1e16ffd91a039e1fcaf0d78a"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 9, "content": "\t\tLOG4CXX_ERROR(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_ERROR(root, createMessage(pool, i));\n\n\t\tLOG4CXX_FATAL(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_FATAL(root, createMessage(pool, i));\n\t}\n\n\tprivate:\n\t\tstatic const LogString FILTERED;\n\t\tstatic const LogString TEMP;\n\n};\n\nconst LogString FMTTestCase::TEMP(LOG4CXX_STR(\"output/fmtlayout\"));\nconst LogString FMTTestCase::FILTERED(LOG4CXX_STR(\"output/fmtlayoutfiltered\"));\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(FMTTestCase);\n", "meta": {"hash_id": "7226336f233d19edcb08ef3540df396e15e083a5a99a37d98c04e91e6a712937"}}]}], "golden_chunks": [{"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 3, "content": "using namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(FMTTestCase)\n{\n\tLOGUNIT_TEST_SUITE(FMTTestCase);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST(test1_expanded);\n\tLOGUNIT_TEST(test10);\n//\tLOGUNIT_TEST(test_date);\n\tLOGUNIT_TEST_SUITE_END();\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\tMDC::clear();\n\t\tlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"java.org.apache.log4j.PatternLayoutTest\"));\n\t}\n\n", "meta": {"hash_id": "3edd1a75f5420609854ef20d4cef9e3774d564e063d5aab001dd23c0efc724b2"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 2, "content": "#define PAT6 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) .*patternlayouttest.cpp\\\\([0-9]\\\\{1,4\\\\}\\\\): Message [0-9]\\\\{1,3\\\\}\")\n#define PAT11a REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ log4j.PatternLayoutTest: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT11b REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ root: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT12 REGEX_STR(\"^\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) \")\\\n\tREGEX_STR(\".*patternlayouttest.cpp([0-9]\\\\{1,4\\\\}): \")\\\n\tREGEX_STR(\"Message [0-9]\\\\{1,2\\\\}\")\n#define PAT_MDC_1 REGEX_STR(\"\")\n\n", "meta": {"hash_id": "90c095f16bb7b73c8138226c8cb137fcfe3e29c982c3ae575338c4f1cd5c2811"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \"testchar.h\"\n", "meta": {"hash_id": "e9c56d29140035b357c7780c1330d82be00fdcf534b9473e69b314f72f9f9c2d"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/fmttest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "What is the purpose of the common() method?", "answer": "The common() method generates log output by logging messages at different levels (DEBUG, INFO, WARN, ERROR, FATAL) to the root logger and a logger named \"java.org.apache.log4j.PatternLayoutTest\". It is called by the various test methods to produce the output that will be compared against expected results. The messages it logs are of the form \"Message #\" where # is an incrementing number.", "golden_doc_uuids": ["fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733"], "golden_chunk_uuids": [["fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", 8]], "golden_documents": [{"uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \"testchar.h\"\n#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/iso8601filter.h\"\n#include \"util/absolutetimefilter.h\"\n#include \"util/relativetimefilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/threadfilter.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/filenamefilter.h\"\n#include \"vectorappender.h\"\n#include \n#include \n#include \n#include \n#include \n#include \n\n#define REGEX_STR(x) x\n#define PAT0 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* - Message [0-9]\\\\{1,2\\\\}\")\n#define PAT1 ISO8601_PAT REGEX_STR(\" \") PAT0\n#define PAT2 ABSOLUTE_DATE_AND_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT3 ABSOLUTE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT4 RELATIVE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT5 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* : Message [0-9]\\\\{1,2\\\\}\")\n#define PAT6 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) .*patternlayouttest.cpp\\\\([0-9]\\\\{1,4\\\\}\\\\): Message [0-9]\\\\{1,3\\\\}\")\n#define PAT11a REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ log4j.PatternLayoutTest: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT11b REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ root: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT12 REGEX_STR(\"^\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) \")\\\n\tREGEX_STR(\".*patternlayouttest.cpp([0-9]\\\\{1,4\\\\}): \")\\\n\tREGEX_STR(\"Message [0-9]\\\\{1,2\\\\}\")\n#define PAT_MDC_1 REGEX_STR(\"\")\n\nusing namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(FMTTestCase)\n{\n\tLOGUNIT_TEST_SUITE(FMTTestCase);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST(test1_expanded);\n\tLOGUNIT_TEST(test10);\n//\tLOGUNIT_TEST(test_date);\n\tLOGUNIT_TEST_SUITE_END();\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\tMDC::clear();\n\t\tlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"java.org.apache.log4j.PatternLayoutTest\"));\n\t}\n\n\tvoid tearDown()\n\t{\n\t\tMDC::clear();\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\tvoid test1()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\tvoid test1_expanded()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1_expanded.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\tvoid test10()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout10.properties\"));\n\t\tcommon();\n\n\t\tControlFilter filter1;\n\t\tfilter1 << PAT6;\n\t\tThreadFilter filter2;\n\t\tLineNumberFilter filter3;\n\t\tFilenameFilter filenameFilter(__FILE__, \"patternlayouttest.cpp\");\n\n\n\t\tstd::vector 
filters;\n\t\tfilters.push_back(&filenameFilter);\n\t\tfilters.push_back(&filter1);\n\t\tfilters.push_back(&filter2);\n\t\tfilters.push_back(&filter3);\n\n\n\t\ttry\n\t\t{\n\t\t\tTransformer::transform(TEMP, FILTERED, filters);\n\t\t}\n\t\tcatch (UnexpectedFormatException& e)\n\t\t{\n\t\t\tstd::cout << \"UnexpectedFormatException :\" << e.what() << std::endl;\n\t\t\tthrow;\n\t\t}\n\n\t\tLOGUNIT_ASSERT(Compare::compare(FILTERED, LOG4CXX_FILE(\"witness/patternLayout.10\")));\n\t}\n\n\tvoid test_date(){\n\t\tstd::tm tm = {};\n\t\tstd::stringstream ss(\"2013-04-11 08:35:34\");\n\t\tss >> std::get_time(&tm, \"%Y-%m-%d %H:%M:%S\");\n\t\tauto tp = std::chrono::system_clock::from_time_t(std::mktime(&tm));\n\t\tuint64_t micros = std::chrono::duration_cast(tp.time_since_epoch()).count();\n\n\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction([micros](){\n\t\t\treturn micros;\n\t\t});\n\n\t\tlog4cxx::spi::LoggingEventPtr logEvt = std::make_shared(LOG4CXX_STR(\"foo\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t Level::getInfo(),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t LOG4CXX_STR(\"A Message\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t log4cxx::spi::LocationInfo::getLocationUnavailable());\n\t\tFMTLayout layout(LOG4CXX_STR(\"{d:%Y-%m-%d %H:%M:%S} {message}\"));\n\t\tLogString output;\n\t\tlog4cxx::helpers::Pool pool;\n\t\tlayout.format( output, logEvt, pool);\n\n\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction(nullptr);\n\n\t\tLOGUNIT_ASSERT_EQUAL(LOG4CXX_STR(\"2013-04-11 09:35:34 A Message\"), output);\n\t}\n\n\tstd::string createMessage(Pool & pool, int i)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = -1;\n\n\t\tPool pool;\n\n\n\t\tLOG4CXX_DEBUG(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_DEBUG(root, createMessage(pool, i));\n\n\t\tLOG4CXX_INFO(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_INFO(root, createMessage(pool, i));\n\n\t\tLOG4CXX_WARN(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_WARN(root, createMessage(pool, i));\n\n\t\tLOG4CXX_ERROR(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_ERROR(root, createMessage(pool, i));\n\n\t\tLOG4CXX_FATAL(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_FATAL(root, createMessage(pool, i));\n\t}\n\n\tprivate:\n\t\tstatic const LogString FILTERED;\n\t\tstatic const LogString TEMP;\n\n};\n\nconst LogString FMTTestCase::TEMP(LOG4CXX_STR(\"output/fmtlayout\"));\nconst LogString FMTTestCase::FILTERED(LOG4CXX_STR(\"output/fmtlayoutfiltered\"));\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(FMTTestCase);\n", "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/fmttest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 10, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n#include \"logunit.h\"\n#include \"testchar.h\"\n", "meta": {"hash_id": "e9c56d29140035b357c7780c1330d82be00fdcf534b9473e69b314f72f9f9c2d"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 1, "content": "#include \"util/compare.h\"\n#include \"util/transformer.h\"\n#include \"util/absolutedateandtimefilter.h\"\n#include \"util/iso8601filter.h\"\n#include \"util/absolutetimefilter.h\"\n#include \"util/relativetimefilter.h\"\n#include \"util/controlfilter.h\"\n#include \"util/threadfilter.h\"\n#include \"util/linenumberfilter.h\"\n#include \"util/filenamefilter.h\"\n#include \"vectorappender.h\"\n#include \n#include \n#include \n#include \n#include \n#include \n\n#define REGEX_STR(x) x\n#define PAT0 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* - Message [0-9]\\\\{1,2\\\\}\")\n#define PAT1 ISO8601_PAT REGEX_STR(\" \") PAT0\n#define PAT2 ABSOLUTE_DATE_AND_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT3 ABSOLUTE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT4 RELATIVE_TIME_PAT REGEX_STR(\" \") PAT0\n#define PAT5 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO|WARN|ERROR|FATAL) .* : Message [0-9]\\\\{1,2\\\\}\")\n", "meta": {"hash_id": "bd13fc5e0f78c0b35ae2fc5b16c9c63cef18abd87a1eeddef5735eab7400b410"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 2, "content": "#define PAT6 REGEX_STR(\"\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) .*patternlayouttest.cpp\\\\([0-9]\\\\{1,4\\\\}\\\\): Message [0-9]\\\\{1,3\\\\}\")\n#define PAT11a REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ log4j.PatternLayoutTest: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT11b REGEX_STR(\"^(DEBUG|INFO |WARN |ERROR|FATAL) \\\\[[0-9A-FXx]*]\\\\ root: Message [0-9]\\\\{1,2\\\\}\")\n#define PAT12 REGEX_STR(\"^\\\\[[0-9A-FXx]*]\\\\ (DEBUG|INFO |WARN |ERROR|FATAL) \")\\\n\tREGEX_STR(\".*patternlayouttest.cpp([0-9]\\\\{1,4\\\\}): \")\\\n\tREGEX_STR(\"Message [0-9]\\\\{1,2\\\\}\")\n#define PAT_MDC_1 REGEX_STR(\"\")\n\n", "meta": {"hash_id": "90c095f16bb7b73c8138226c8cb137fcfe3e29c982c3ae575338c4f1cd5c2811"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 3, "content": "using namespace log4cxx;\nusing namespace log4cxx::helpers;\n\nLOGUNIT_CLASS(FMTTestCase)\n{\n\tLOGUNIT_TEST_SUITE(FMTTestCase);\n\tLOGUNIT_TEST(test1);\n\tLOGUNIT_TEST(test1_expanded);\n\tLOGUNIT_TEST(test10);\n//\tLOGUNIT_TEST(test_date);\n\tLOGUNIT_TEST_SUITE_END();\n\n\tLoggerPtr root;\n\tLoggerPtr logger;\n\npublic:\n\tvoid setUp()\n\t{\n\t\troot = Logger::getRootLogger();\n\t\tMDC::clear();\n\t\tlogger = Logger::getLogger(LOG4CXX_TEST_STR(\"java.org.apache.log4j.PatternLayoutTest\"));\n\t}\n\n", "meta": {"hash_id": "3edd1a75f5420609854ef20d4cef9e3774d564e063d5aab001dd23c0efc724b2"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 4, "content": "\tvoid tearDown()\n\t{\n\t\tMDC::clear();\n\t\tauto rep = root->getLoggerRepository();\n\n\t\tif (rep)\n\t\t{\n\t\t\trep->resetConfiguration();\n\t\t}\n\t}\n\n\tvoid 
test1()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n\tvoid test1_expanded()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout1_expanded.properties\"));\n\t\tcommon();\n\t\tLOGUNIT_ASSERT(Compare::compare(TEMP, LOG4CXX_FILE(\"witness/patternLayout.1\")));\n\t}\n\n", "meta": {"hash_id": "761c753883136ab3f7ad8e593541b479dcd4883222bf1903e785890589f20d10"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 5, "content": "\tvoid test10()\n\t{\n\t\tPropertyConfigurator::configure(LOG4CXX_FILE(\"input/fmtLayout10.properties\"));\n\t\tcommon();\n\n\t\tControlFilter filter1;\n\t\tfilter1 << PAT6;\n\t\tThreadFilter filter2;\n\t\tLineNumberFilter filter3;\n\t\tFilenameFilter filenameFilter(__FILE__, \"patternlayouttest.cpp\");\n\n\n\t\tstd::vector filters;\n\t\tfilters.push_back(&filenameFilter);\n\t\tfilters.push_back(&filter1);\n\t\tfilters.push_back(&filter2);\n\t\tfilters.push_back(&filter3);\n\n", "meta": {"hash_id": "b4c83384fef5ab4ea970cafb82f12e69f0fcb53f4e88eff722bff7f88881dab3"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 6, "content": "\n\t\ttry\n\t\t{\n\t\t\tTransformer::transform(TEMP, FILTERED, filters);\n\t\t}\n\t\tcatch (UnexpectedFormatException& e)\n\t\t{\n\t\t\tstd::cout << \"UnexpectedFormatException :\" << e.what() << std::endl;\n\t\t\tthrow;\n\t\t}\n\n\t\tLOGUNIT_ASSERT(Compare::compare(FILTERED, LOG4CXX_FILE(\"witness/patternLayout.10\")));\n\t}\n\n\tvoid test_date(){\n\t\tstd::tm tm = {};\n\t\tstd::stringstream ss(\"2013-04-11 08:35:34\");\n\t\tss >> std::get_time(&tm, \"%Y-%m-%d %H:%M:%S\");\n\t\tauto tp = std::chrono::system_clock::from_time_t(std::mktime(&tm));\n\t\tuint64_t micros = std::chrono::duration_cast(tp.time_since_epoch()).count();\n\n\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction([micros](){\n\t\t\treturn micros;\n\t\t});\n\n", "meta": {"hash_id": "c46aaae84f6b9715870e81d032143af14414958268efd0c4869894170ef01f66"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 7, "content": "\t\tlog4cxx::spi::LoggingEventPtr logEvt = std::make_shared(LOG4CXX_STR(\"foo\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t Level::getInfo(),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t LOG4CXX_STR(\"A Message\"),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t log4cxx::spi::LocationInfo::getLocationUnavailable());\n\t\tFMTLayout layout(LOG4CXX_STR(\"{d:%Y-%m-%d %H:%M:%S} {message}\"));\n\t\tLogString output;\n\t\tlog4cxx::helpers::Pool pool;\n\t\tlayout.format( output, logEvt, pool);\n\n", "meta": {"hash_id": "86cabe576af5d9a6ad2edf507c6489ccd437823eaa40dc779c901fa6c4e9d4a2"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 8, "content": "\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction(nullptr);\n\n\t\tLOGUNIT_ASSERT_EQUAL(LOG4CXX_STR(\"2013-04-11 09:35:34 A Message\"), output);\n\t}\n\n\tstd::string createMessage(Pool & pool, int i)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = -1;\n\n\t\tPool pool;\n\n\n\t\tLOG4CXX_DEBUG(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_DEBUG(root, createMessage(pool, i));\n\n\t\tLOG4CXX_INFO(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_INFO(root, createMessage(pool, i));\n\n\t\tLOG4CXX_WARN(logger, 
createMessage(pool, ++i));\n\t\tLOG4CXX_WARN(root, createMessage(pool, i));\n\n", "meta": {"hash_id": "e41054e5960beb7dfc12deda1825d4fe2555b70b1e16ffd91a039e1fcaf0d78a"}}, {"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 9, "content": "\t\tLOG4CXX_ERROR(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_ERROR(root, createMessage(pool, i));\n\n\t\tLOG4CXX_FATAL(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_FATAL(root, createMessage(pool, i));\n\t}\n\n\tprivate:\n\t\tstatic const LogString FILTERED;\n\t\tstatic const LogString TEMP;\n\n};\n\nconst LogString FMTTestCase::TEMP(LOG4CXX_STR(\"output/fmtlayout\"));\nconst LogString FMTTestCase::FILTERED(LOG4CXX_STR(\"output/fmtlayoutfiltered\"));\n\n\nLOGUNIT_TEST_SUITE_REGISTRATION(FMTTestCase);\n", "meta": {"hash_id": "7226336f233d19edcb08ef3540df396e15e083a5a99a37d98c04e91e6a712937"}}]}], "golden_chunks": [{"doc_uuid": "fa92d94e026aa09f1ac535418c60e3f3b20d02149a0469d4abdeea08f280f733", "index": 8, "content": "\t\tlog4cxx::helpers::Date::setGetCurrentTimeFunction(nullptr);\n\n\t\tLOGUNIT_ASSERT_EQUAL(LOG4CXX_STR(\"2013-04-11 09:35:34 A Message\"), output);\n\t}\n\n\tstd::string createMessage(Pool & pool, int i)\n\t{\n\t\tstd::string msg(\"Message \");\n\t\tmsg.append(pool.itoa(i));\n\t\treturn msg;\n\t}\n\n\tvoid common()\n\t{\n\t\tint i = -1;\n\n\t\tPool pool;\n\n\n\t\tLOG4CXX_DEBUG(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_DEBUG(root, createMessage(pool, i));\n\n\t\tLOG4CXX_INFO(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_INFO(root, createMessage(pool, i));\n\n\t\tLOG4CXX_WARN(logger, createMessage(pool, ++i));\n\t\tLOG4CXX_WARN(root, createMessage(pool, i));\n\n", "meta": {"hash_id": "e41054e5960beb7dfc12deda1825d4fe2555b70b1e16ffd91a039e1fcaf0d78a"}}], "meta": {"doctype": "codebase", "relative_path": "/src/test/cpp/fmttest.cpp", "repo_name": "apache/logging-log4cxx", "num_chunks": 10, "source": "codebases/jsonl"}} +{"query": "How do you construct a BufferedWriter object?", "answer": "There are two constructor options for creating a BufferedWriter object:\nBufferedWriter(WriterPtr& out);\nBufferedWriter(WriterPtr& out, size_t sz); \nBoth constructors take a WriterPtr reference as the first argument, which represents the underlying character-output stream. The second constructor also accepts a size_t parameter sz, likely specifying the buffer size.", "golden_doc_uuids": ["4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f"], "golden_chunk_uuids": [["4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", 2]], "golden_documents": [{"uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#ifndef _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n#define _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n\n#include \n\nnamespace LOG4CXX_NS\n{\n\nnamespace helpers\n{\n\n/**\n* Writes text to a character-output stream buffering\n* requests to increase efficiency.\n*/\nclass LOG4CXX_EXPORT BufferedWriter : public Writer\n{\n\tprivate:\n\t\tLOG4CXX_DECLARE_PRIVATE_MEMBER_PTR(BufferedWriterPriv, m_priv)\n\n\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/helpers/bufferedwriter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "95efb50451dd7d97d301f5caa8e01173d002b19e8431df5aadd20e60f58cbd4b"}}, {"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 1, "content": "#ifndef _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n#define _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n\n#include \n\nnamespace LOG4CXX_NS\n{\n\nnamespace helpers\n{\n\n/**\n* Writes text to a character-output stream buffering\n* requests to increase efficiency.\n*/\nclass LOG4CXX_EXPORT BufferedWriter : public Writer\n{\n\tprivate:\n\t\tLOG4CXX_DECLARE_PRIVATE_MEMBER_PTR(BufferedWriterPriv, m_priv)\n\n", "meta": {"hash_id": "1307f8739547ca20d3cd6d26875a5e3d883427a7eaef92f7e6059bae44045195"}}, {"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 2, "content": "\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n", "meta": {"hash_id": "6881f4a899afb527b74eef1472f1dd50f31b78c865e5a2c22e0a0fabf7e684e5"}}]}], "golden_chunks": [{"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 2, "content": "\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n", "meta": {"hash_id": "6881f4a899afb527b74eef1472f1dd50f31b78c865e5a2c22e0a0fabf7e684e5"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/helpers/bufferedwriter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "What methods does BufferedWriter override from its parent class?", "answer": "BufferedWriter overrides the following methods from its parent Writer class:\nvoid close(Pool& p) override;\nvoid flush(Pool& p) override;\nvoid write(const LogString& str, Pool& p) override;\nThese methods are used to close the writer, flush the buffer, and write a string to the underlying output stream, respectively. 
They all take a reference to a Pool object, likely for memory management purposes.", "golden_doc_uuids": ["4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f"], "golden_chunk_uuids": [["4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", 2]], "golden_documents": [{"uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#ifndef _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n#define _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n\n#include \n\nnamespace LOG4CXX_NS\n{\n\nnamespace helpers\n{\n\n/**\n* Writes text to a character-output stream buffering\n* requests to increase efficiency.\n*/\nclass LOG4CXX_EXPORT BufferedWriter : public Writer\n{\n\tprivate:\n\t\tLOG4CXX_DECLARE_PRIVATE_MEMBER_PTR(BufferedWriterPriv, m_priv)\n\n\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/helpers/bufferedwriter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "95efb50451dd7d97d301f5caa8e01173d002b19e8431df5aadd20e60f58cbd4b"}}, {"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 1, "content": "#ifndef _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n#define _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n\n#include \n\nnamespace LOG4CXX_NS\n{\n\nnamespace helpers\n{\n\n/**\n* Writes text to a character-output stream buffering\n* requests to increase efficiency.\n*/\nclass LOG4CXX_EXPORT BufferedWriter : public Writer\n{\n\tprivate:\n\t\tLOG4CXX_DECLARE_PRIVATE_MEMBER_PTR(BufferedWriterPriv, m_priv)\n\n", "meta": {"hash_id": "1307f8739547ca20d3cd6d26875a5e3d883427a7eaef92f7e6059bae44045195"}}, {"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 2, "content": "\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n", "meta": {"hash_id": "6881f4a899afb527b74eef1472f1dd50f31b78c865e5a2c22e0a0fabf7e684e5"}}]}], "golden_chunks": [{"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 2, "content": "\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n", "meta": {"hash_id": "6881f4a899afb527b74eef1472f1dd50f31b78c865e5a2c22e0a0fabf7e684e5"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/helpers/bufferedwriter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}} +{"query": "How does BufferedWriter handle object destruction?", "answer": "BufferedWriter defines a virtual destructor:\nvirtual ~BufferedWriter();\nThis ensures that when a BufferedWriter object is deleted through a pointer to its base Writer class, the correct destructor is called to properly clean up the BufferedWriter object.", "golden_doc_uuids": ["4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f"], "golden_chunk_uuids": 
[["4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", 2]], "golden_documents": [{"uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#ifndef _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n#define _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n\n#include \n\nnamespace LOG4CXX_NS\n{\n\nnamespace helpers\n{\n\n/**\n* Writes text to a character-output stream buffering\n* requests to increase efficiency.\n*/\nclass LOG4CXX_EXPORT BufferedWriter : public Writer\n{\n\tprivate:\n\t\tLOG4CXX_DECLARE_PRIVATE_MEMBER_PTR(BufferedWriterPriv, m_priv)\n\n\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n", "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/helpers/bufferedwriter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}, "chunks": [{"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 0, "content": "/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n", "meta": {"hash_id": "95efb50451dd7d97d301f5caa8e01173d002b19e8431df5aadd20e60f58cbd4b"}}, {"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 1, "content": "#ifndef _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n#define _LOG4CXX_HELPERS_BUFFEREDWRITER_H\n\n#include \n\nnamespace LOG4CXX_NS\n{\n\nnamespace helpers\n{\n\n/**\n* Writes text to a character-output stream buffering\n* requests to increase efficiency.\n*/\nclass LOG4CXX_EXPORT BufferedWriter : public Writer\n{\n\tprivate:\n\t\tLOG4CXX_DECLARE_PRIVATE_MEMBER_PTR(BufferedWriterPriv, m_priv)\n\n", "meta": {"hash_id": "1307f8739547ca20d3cd6d26875a5e3d883427a7eaef92f7e6059bae44045195"}}, {"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 2, "content": "\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n", "meta": {"hash_id": "6881f4a899afb527b74eef1472f1dd50f31b78c865e5a2c22e0a0fabf7e684e5"}}]}], "golden_chunks": [{"doc_uuid": "4687d993285f0820ff87223e5927490b5359c788e3e11b14f8de2c4f77c8f60f", "index": 2, "content": "\tpublic:\n\t\tDECLARE_ABSTRACT_LOG4CXX_OBJECT(BufferedWriter)\n\t\tBEGIN_LOG4CXX_CAST_MAP()\n\t\tLOG4CXX_CAST_ENTRY(BufferedWriter)\n\t\tLOG4CXX_CAST_ENTRY_CHAIN(Writer)\n\t\tEND_LOG4CXX_CAST_MAP()\n\n\t\tBufferedWriter(WriterPtr& out);\n\t\tBufferedWriter(WriterPtr& out, size_t sz);\n\t\tvirtual ~BufferedWriter();\n\n\t\tvoid close(Pool& p) override;\n\t\tvoid flush(Pool& p) override;\n\t\tvoid write(const LogString& str, Pool& p) override;\n\n\tprivate:\n\t\tBufferedWriter(const BufferedWriter&);\n\t\tBufferedWriter& operator=(const BufferedWriter&);\n};\n\n} // namespace helpers\n\n} //namespace log4cxx\n\n#endif //_LOG4CXX_HELPERS_BUFFEREDWRITER_H\n", "meta": {"hash_id": "6881f4a899afb527b74eef1472f1dd50f31b78c865e5a2c22e0a0fabf7e684e5"}}], "meta": {"doctype": "codebase", "relative_path": "/src/main/include/log4cxx/helpers/bufferedwriter.h", "repo_name": "apache/logging-log4cxx", "num_chunks": 3, "source": "codebases/jsonl"}} diff --git a/experiments/denser_data.py b/experiments/denser_data.py new file mode 100644 index 0000000..353d658 --- /dev/null +++ b/experiments/denser_data.py @@ -0,0 +1,18 @@ +from denser_retriever.utils import ( + load_qrels, + load_queries, +) +import os + + +class DenserData: + def __init__(self, dir_path): + self.data_dir = dir_path + + def load_queries(self): + queries = load_queries(os.path.join(self.data_dir, 'queries.jsonl')) + return queries + + def load_qrels(self): + qrels = 
load_qrels(os.path.join(self.data_dir, 'qrels.jsonl'))
+        return qrels
diff --git a/experiments/train_and_test.py b/experiments/train_and_test.py
index f76fc8a..708e809 100644
--- a/experiments/train_and_test.py
+++ b/experiments/train_and_test.py
@@ -2,6 +2,7 @@
 import os
 import sys
 import json
+import shutil
 
 from langchain_core.documents import Document
 import xgboost as xgb
@@ -16,8 +17,9 @@
 from denser_retriever.reranker import HFReranker
 from denser_retriever.retriever import DenserRetriever
 from denser_retriever.vectordb.milvus import MilvusDenserVectorDB
-from denser_retriever.embeddings import SentenceTransformerEmbeddings
+from denser_retriever.embeddings import VoyageAPIEmbeddings
 from experiments.hf_data_loader import HFDataLoader
+from experiments.denser_data import DenserData
 from denser_retriever.utils import (
     evaluate,
     save_queries,
@@ -25,7 +27,7 @@
     load_qrels,
     docs_to_dict,
 )
-from experiments.utils import prepare_xgbdata, save_HF_corpus_as_docs
+from utils import prepare_xgbdata, save_HF_corpus_as_docs, copy_file
 
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
@@ -61,32 +63,41 @@ def __init__(self, dataset_name, drop_old):
                 auto_id=True,
                 drop_old=drop_old
             ),
-            reranker=HFReranker(model_name="cross-encoder/ms-marco-MiniLM-L-6-v2", top_k=100),
-            embeddings=SentenceTransformerEmbeddings(
-                "Snowflake/snowflake-arctic-embed-m", 768, False
-            ),
+            reranker=HFReranker(model_name="jinaai/jina-reranker-v2-base-multilingual", top_k=100,
+                                automodel_args={"torch_dtype": "float32"}, trust_remote_code=True),
+            embeddings=VoyageAPIEmbeddings(api_key="YOUR_API_KEY",
+                                           model_name="voyage-2", embedding_size=1024),
             gradient_boost=None
         )
+        self.max_query_size = 0
+        self.max_query_len = 2000
         self.max_doc_size = 0
-        self.max_query_size = 8000
+        self.max_doc_len = 8000
 
     def ingest(self, dataset_name, split):
-        corpus, queries, qrels = HFDataLoader(
-            hf_repo=dataset_name,
-            hf_repo_qrels=None,
-            streaming=False,
-            keep_in_memory=False,
-        ).load(split=split)
-
         exp_dir = os.path.join(self.output_prefix, split)
         if not os.path.exists(exp_dir):
             os.makedirs(exp_dir)
 
         passage_file = os.path.join(exp_dir, "passages.jsonl")
-        save_HF_corpus_as_docs(
-            corpus, passage_file, self.max_doc_size
-        )
+        if dataset_name == 'anthropic_base':
+            copy_file('experiments/data/contextual-embeddings/data_base/passages.jsonl', passage_file,
+                      self.max_doc_size)
+        elif dataset_name == 'anthropic_context':
+            copy_file('experiments/data/contextual-embeddings/data_context/passages.jsonl', passage_file,
+                      self.max_doc_size)
+        else:
+            corpus, _, _ = HFDataLoader(
+                hf_repo=dataset_name,
+                hf_repo_qrels=None,
+                streaming=False,
+                keep_in_memory=False,
+            ).load(split=split)
+
+            save_HF_corpus_as_docs(
+                corpus, passage_file, self.max_doc_size
+            )
 
         out = open(passage_file, "r")
         docs = []
@@ -103,21 +114,28 @@ def ingest(self, dataset_name, split):
         self.retriever.ingest(docs, overwrite_pid=False)
 
     def generate_feature_data(self, dataset_name, split):
-        _, queries, qrels = HFDataLoader(
-            hf_repo=dataset_name,
-            hf_repo_qrels=None,
-            streaming=False,
-            keep_in_memory=False,
-        ).load(split=split)
-
         exp_dir = os.path.join(self.output_prefix, split)
         if not os.path.exists(exp_dir):
             os.makedirs(exp_dir)
-
         query_file = os.path.join(exp_dir, "queries.jsonl")
-        save_queries(queries, query_file)
         qrels_file = os.path.join(exp_dir, "qrels.jsonl")
-        save_qrels(qrels, qrels_file)
+
+        if dataset_name in ["anthropic_base", "anthropic_context"]:
+            shutil.copy('experiments/data/contextual-embeddings/data_context/queries.jsonl', query_file)
+            shutil.copy('experiments/data/contextual-embeddings/data_context/qrels.jsonl', qrels_file)
+            data = DenserData("experiments/data/contextual-embeddings/data_base")
+            queries = data.load_queries()
+            qrels = data.load_qrels()
+        else:  # assume HF datasets
+            _, queries, qrels = HFDataLoader(
+                hf_repo=dataset_name,
+                hf_repo_qrels=None,
+                streaming=False,
+                keep_in_memory=False,
+            ).load(split=split)
+            save_queries(queries, query_file)
+            save_qrels(qrels, qrels_file)
+
         feature_file = os.path.join(exp_dir, "features.svmlight")
         feature_out = open(feature_file, "w")
@@ -445,7 +463,8 @@ def test(self, eval_on, model_dir):
             retriever_config,
         )
 
-    def report(self, eval_on):
+    def report(self, eval_on, metric_str):
+        print(f"\n== {metric_str}")
         for metric_file in [
             "metric_keyword.json",
             "metric_vector.json",
@@ -462,7 +481,7 @@ def report(self, eval_on):
             file = os.path.join(self.output_prefix, eval_on, metric_file)
             for line in open(file, "r"):
                 line = line.strip()
-                if "NDCG@10" in line:
+                if metric_str in line:
                     print(f"{metric_file}: {line}")
                     break
@@ -512,4 +531,5 @@ def report(self, eval_on):
     logger.info(
         f"train: {train_on}, eval: {eval_on}, cross-validation: {train_on == eval_on}"
     )
-    experiment.report(eval_on)
+    experiment.report(eval_on, "NDCG@20")
+    experiment.report(eval_on, "Recall@20")
diff --git a/experiments/utils.py b/experiments/utils.py
index a89d60d..a8b6fca 100644
--- a/experiments/utils.py
+++ b/experiments/utils.py
@@ -11,6 +11,16 @@
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
+def copy_file(source_file, dest_file, top_k):
+    # Copy the first top_k lines of source_file into dest_file;
+    # a top_k of zero or less copies the entire file.
+    with open(source_file, 'r') as src, open(dest_file, 'w') as dest:
+        for i, line in enumerate(src):
+            if top_k <= 0 or i < top_k:
+                dest.write(line)
+            else:
+                break
+
 def save_HF_corpus_as_docs(corpus, output_file: str, max_doc_size):
     out = open(output_file, "w")
     seen = set()
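
Reviewer note (not part of the diff): a minimal sketch of how the two helpers
added above compose, assuming it runs from the repository root with the data
files created by this patch in place. The /tmp destination path and the top_k
value of 100 are illustrative choices only, and the import style may need
adjusting to match how you invoke the experiments scripts (train_and_test.py
uses `from utils import copy_file` because it runs from inside experiments/).

    from experiments.denser_data import DenserData
    from experiments.utils import copy_file

    # copy_file writes the first top_k lines of the source file; a top_k <= 0
    # copies everything, which is how ingest() uses it (self.max_doc_size = 0).
    copy_file(
        "experiments/data/contextual-embeddings/data_context/passages.jsonl",
        "/tmp/passages_head.jsonl",
        100,
    )

    # The patch treats the base and context variants' queries/qrels as
    # interchangeable (only their passages differ), so loading one copy
    # is enough for either run.
    data = DenserData("experiments/data/contextual-embeddings/data_base")
    queries = data.load_queries()
    qrels = data.load_qrels()
    print(f"loaded {len(queries)} queries and {len(qrels)} qrels")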