Merge pull request #52 from huggingface/logging_fix

improve logging
Andrés Marafioti
2024-08-27 15:31:33 +02:00
committed by GitHub
10 changed files with 1 addition and 29 deletions


@@ -13,9 +13,6 @@ from rich.console import Console
 import logging
 from nltk import sent_tokenize
-logging.basicConfig(
-    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
 logger = logging.getLogger(__name__)
 console = Console()
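The three deleted lines are the same per-module logging.basicConfig(...) call that was repeated at the top of each handler; after this change a handler only requests a named logger and leaves configuration to the application entrypoint. A minimal sketch of the surviving pattern, with an illustrative function name not taken from the diff:

import logging
from rich.console import Console

logger = logging.getLogger(__name__)  # no logging.basicConfig() in the module anymore
console = Console()

def run_handler():
    # Records propagate to the root logger, which the entrypoint configures once.
    logger.info("handler ready")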


@@ -5,9 +5,6 @@ from mlx_lm import load, stream_generate, generate
 from rich.console import Console
 import torch
-logging.basicConfig(
-    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
 logger = logging.getLogger(__name__)
 console = Console()


@@ -6,9 +6,6 @@ import numpy as np
 from rich.console import Console
 import torch
-logging.basicConfig(
-    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
 logger = logging.getLogger(__name__)
 console = Console()


@@ -9,11 +9,7 @@ from baseHandler import BaseHandler
 from rich.console import Console
 import logging
-logging.basicConfig(
-    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
 logger = logging.getLogger(__name__)
 console = Console()


@@ -6,9 +6,6 @@ import numpy as np
 from rich.console import Console
 import torch
-logging.basicConfig(
-    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
 logger = logging.getLogger(__name__)
 console = Console()


@@ -19,9 +19,6 @@ torch._inductor.config.fx_graph_cache = True
 # mind about this parameter ! should be >= 2 * number of padded prompt sizes for TTS
 torch._dynamo.config.cache_size_limit = 15
-logging.basicConfig(
-    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
 logger = logging.getLogger(__name__)
 console = Console()


@@ -8,9 +8,6 @@ from utils.utils import int2float
 import logging
-logging.basicConfig(
-    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
 logger = logging.getLogger(__name__)
 console = Console()


@@ -2,9 +2,6 @@ import socket
 from rich.console import Console
 import logging
-logging.basicConfig(
-    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
 logger = logging.getLogger(__name__)
 console = Console()


@@ -2,9 +2,6 @@ import socket
 from rich.console import Console
 import logging
-logging.basicConfig(
-    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
-)
 logger = logging.getLogger(__name__)
 console = Console()


@@ -43,8 +43,8 @@ except (LookupError, OSError):
 CURRENT_DIR = Path(__file__).resolve().parent
 os.environ["TORCHINDUCTOR_CACHE_DIR"] = os.path.join(CURRENT_DIR, "tmp")
 console = Console()
+logging.getLogger("numba").setLevel(logging.WARNING) # quiet down numba logs
 def prepare_args(args, prefix):
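With the per-module calls removed, a single logging.basicConfig(...) in the entrypoint is enough to configure every handler's logger, and noisy third-party loggers can be raised to WARNING as the last hunk does for numba. A minimal sketch of that centralized setup, assuming the same format string as the deleted calls and an INFO level (the level is not shown on this page):

import logging

logging.basicConfig(
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    level=logging.INFO,  # assumed default; the diff does not show the chosen level
)
logging.getLogger("numba").setLevel(logging.WARNING)  # quiet down numba logs, as added above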