Prevent unwanted log messages from modules

This commit is contained in:
oobabooga 2023-05-21 22:42:34 -03:00
parent fb91406e93
commit e116d31180
20 changed files with 120 additions and 111 deletions

View file

@@ -6,20 +6,20 @@ Documentation:
https://abetlen.github.io/llama-cpp-python/
'''
import logging
import re
from llama_cpp import Llama, LlamaCache
from modules import shared
from modules.callbacks import Iteratorize
from modules.logging_colors import logger
class LlamaCppModel:
def __init__(self):
    """Create an uninitialized wrapper; the model itself is loaded elsewhere (see from_pretrained-style classmethod below this hunk)."""
    # Flag flipped once a model has actually been loaded — presumably checked by callers; TODO confirm against the rest of the file.
    self.initialized = False
def __del__(self):
    """Release the underlying llama.cpp model when this wrapper is destroyed.

    The source shows a duplicated ``def __del__(self):`` line (a diff-rendering
    artifact: removed and added lines collapsed together without +/- markers),
    which as plain Python is a syntax error — the duplicate is dropped here.
    Delegates cleanup to the wrapped model's own ``__del__``; assumes
    ``self.model`` has been set by the loader — TODO confirm callers never
    destroy an instance before loading.
    """
    self.model.__del__()
@classmethod
@@ -35,7 +35,7 @@ class LlamaCppModel:
else:
cache_capacity = int(shared.args.cache_capacity)
logging.info("Cache capacity is " + str(cache_capacity) + " bytes")
logger.info("Cache capacity is " + str(cache_capacity) + " bytes")
params = {
'model_path': str(path),