remove textcolor dependency
Signed-off-by: Michele Dolfi <[email protected]>
dolfim-ibm committed Sep 9, 2024
1 parent 71b149d commit 3c40a9c
Showing 5 changed files with 33 additions and 38 deletions.
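The migration is mechanical across the three Python files: each module creates one rich Console at import time and replaces the textColor wrappers (tc.red, tc.yellow, ...) with console.print plus a style argument. A minimal sketch of the pattern, illustrative rather than copied from the diff:

from rich.console import Console

# One shared, module-level console, as the commit does in each file.
console = Console()

# Old form (removed):  print(tc.red("something went wrong"))
# New form:
console.print("something went wrong", style="red")

# rich also accepts inline markup, which one call site below uses instead of style=.
console.print("[yellow]properties:[/yellow]", " null")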
8 changes: 5 additions & 3 deletions deepsearch_glm/nlp_model_training/reference_parsing.py
@@ -11,8 +11,8 @@
import time

import pandas as pd
import textColor as tc
import tqdm
from rich.console import Console
from tabulate import tabulate

# from deepsearch_glm.andromeda_nlp import nlp_model
@@ -24,6 +24,8 @@
)
from deepsearch_glm.utils.load_pretrained_models import get_resources_dir

console = Console()


def parse_arguments():
"""Function to parse arguments for nlp_train_reference"""
@@ -230,8 +232,8 @@ def parse_with_anystyle_api(anystyle, refs):

return tmp
except Exception as exc:
print(tc.red("could not call anystyle API endpoint ..."))
print(tc.yellow(f" -> error: {str(exc)}"))
console.print("could not call anystyle API endpoint ...", style="red")
console.print(f" -> error: {str(exc)}", style="yellow")

if os.path.exists(tmpfile):
os.remove(tmpfile)
14 changes: 8 additions & 6 deletions deepsearch_glm/nlp_model_training/semantic_classifier.py
@@ -9,14 +9,16 @@
import sys

import pandas as pd
import textColor as tc
import tqdm
from rich.console import Console
from tabulate import tabulate

from deepsearch_glm.andromeda_nlp import nlp_model
from deepsearch_glm.nlp_utils import create_nlp_dir, init_nlp_model
from deepsearch_glm.utils.ds_utils import ds_index_query

console = Console()


def parse_arguments():
"""Function to parse arguments for `nlp_train_semantic`"""
@@ -231,15 +233,15 @@ def prepare_data_from_legacy_documents(doc):
label = "text"

if "title" in label:
print(tc.yellow(f"{label}, {type_}: {text[0:48]}"))
console.print(f"{label}, {type_}: {text[0:48]}", style="yellow")
elif "meta" in label:
print(tc.green(f"\t{label}, {type_}: {text[0:48]}"))
console.print(f"\t{label}, {type_}: {text[0:48]}", style="green")
elif "text" in label:
print(f"\t{label}, {type_}: {text[0:48]}")
console.print(f"\t{label}, {type_}: {text[0:48]}")
elif "reference" in label:
print(tc.blue(f"\t{label}, {type_}: {text[0:48]}"))
console.print(f"\t{label}, {type_}: {text[0:48]}", style="blue")
else:
print(tc.red(f"\t{label}, {type_}: {text[0:48]}"))
console.print(f"\t{label}, {type_}: {text[0:48]}", style="red")

if random.random() < 0.9:
training_sample = True
16 changes: 11 additions & 5 deletions deepsearch_glm/nlp_utils.py
@@ -14,7 +14,7 @@
from typing import List

import pandas as pd
import textColor as tc
from rich.console import Console
from tabulate import tabulate

from deepsearch_glm.andromeda_nlp import nlp_model
@@ -23,6 +23,9 @@
# import andromeda_nlp


console = Console()


def create_nlp_dir(tdir=None):
"""Function to create NLP directory"""

@@ -102,25 +105,28 @@ def print_key_on_shell(key, items):
table.append(_)

headers = ["type", "subtype", "subj_path", "char_i", "char_j", "original"]
print(tc.yellow(f"{key}: \n\n"), tabulate(table, headers=headers), "\n")
console.print(f"{key}: \n", style="yellow")
console.print(tabulate(table, headers=headers), "\n")

else:
df = pd.DataFrame(items["data"], columns=items["headers"])

print(tc.yellow(f"{key}: \n\n"), df.to_string(), "\n")
console.print(f"{key}: \n", style="yellow")
console.print(df.to_string(), "\n")


def print_on_shell(text, result):
"""Function to print text on shell"""

wrapper = textwrap.TextWrapper(width=70)
print(tc.yellow(f"\ntext: \n\n"), "\n".join(wrapper.wrap(text)), "\n")
console.print(f"\ntext: \n", style="yellow")
console.print("\n".join(wrapper.wrap(text)), "\n")

for _ in ["properties", "word-tokens", "instances", "entities", "relations"]:
if _ in result and len(result[_]["data"]) > 0:
print_key_on_shell(_, result[_])
else:
print(tc.yellow(f"{_}:"), " null\n\n")
console.print(f"[yellow]{_}:[/yellow]", " null\n\n")


def extract_metadata_from_doc(doc):
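Note that the nlp_utils.py hunk above mixes two equivalent ways of styling output with rich: the style= keyword on most call sites and inline markup ([yellow]...[/yellow]) on the last changed line. With a default Console both render the same; a small sketch, assuming default settings:

from rich.console import Console

console = Console()

# These two calls produce the same yellow output, since Console.print
# parses markup by default.
console.print("properties:", style="yellow")
console.print("[yellow]properties:[/yellow]")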
27 changes: 8 additions & 19 deletions poetry.lock

Some generated files are not rendered by default.

6 changes: 1 addition & 5 deletions pyproject.toml
@@ -14,7 +14,6 @@ build = "build.py"
[tool.poetry.dependencies]
python = "^3.8"
deepsearch-toolkit = { version = ">=0.31.0", optional = true }
textColor = "^3.0.1"
tabulate = ">=0.8.9"
numpy = [
{ version = "^1.26.4", markers = 'python_version >= "3.9"' },
@@ -29,6 +28,7 @@ python-dotenv = "^1.0.0"
pybind11 = "^2.10.4"
numerize = "^0.12"
tqdm = "^4.64.0"
rich = "^13.7.0"

[tool.poetry.group.test.dependencies]
pytest = "^7.4.2"
@@ -95,10 +95,6 @@ ignore_missing_imports = true
module = "tqdm.*"
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = "textColor.*"
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = "matplotlib.*"
ignore_missing_imports = true
