Lint

parent bec4e0a1ce
commit 89e7e107fc

5 changed files with 22 additions and 21 deletions
@@ -3,7 +3,7 @@ def get_alpha_value(alpha, base):
     Gets alpha_value from alpha_value and rope_freq_base
     '''
     if base > 0:
-        return (base/10000.) ** (63/64.)
+        return (base / 10000.) ** (63 / 64.)
     else:
         return alpha
@@ -15,4 +15,4 @@ def get_rope_freq_base(alpha, base):
     if base > 0:
         return base
     else:
-        return 10000 * alpha ** (64/63.)
+        return 10000 * alpha ** (64 / 63.)
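The two helpers above are inverses of one another: get_alpha_value maps a RoPE frequency base to an alpha value via (base / 10000) ** (63 / 64), and get_rope_freq_base maps an alpha value back via 10000 * alpha ** (64 / 63). A minimal round-trip sketch, with the functions restated from the diff so it runs standalone:

def get_alpha_value(alpha, base):
    # From the diff above: prefer an explicit base when one is given.
    if base > 0:
        return (base / 10000.) ** (63 / 64.)
    else:
        return alpha


def get_rope_freq_base(alpha, base):
    # Inverse mapping: recover the frequency base from alpha.
    if base > 0:
        return base
    else:
        return 10000 * alpha ** (64 / 63.)


# Round trip: base -> alpha -> base recovers the original value (up to float error).
base = 500000
alpha = get_alpha_value(0, base)
assert abs(get_rope_freq_base(alpha, 0) - base) < 1e-6 * base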
@@ -529,6 +529,7 @@ def load_history_after_deletion(state, idx):
     histories = find_all_histories(state)
     idx = min(int(idx), len(histories) - 1)
     idx = max(0, idx)
+
     if len(histories) > 0:
         history = load_history(histories[idx], state['character_menu'], state['mode'])
@@ -542,6 +543,7 @@ def load_history_after_deletion(state, idx):
 def update_character_menu_after_deletion(idx):
     characters = utils.get_available_characters()
     idx = min(int(idx), len(characters) - 1)
     idx = max(0, idx)
     return gr.update(choices=characters, value=characters[idx])
+
 
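Both history and character deletion clamp the incoming index with the same min/max pair so the selection stays valid after an item disappears. A standalone illustration of that clamping step (clamp_index is a hypothetical helper, not part of the codebase):

def clamp_index(idx, length):
    # Mirror of the pattern in the diff: first cap at the last valid position,
    # then floor at zero.
    idx = min(int(idx), length - 1)
    idx = max(0, idx)
    return idx


print(clamp_index(5, 3))   # 2 -> falls back to the last remaining item
print(clamp_index(-4, 3))  # 0 -> floored at the first item
print(clamp_index(2, 0))   # 0 -> still needs the `if len(histories) > 0` guard before indexing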
@@ -309,14 +309,14 @@ def AutoAWQ_loader(model_name):
     model_dir = Path(f'{shared.args.model_dir}/{model_name}')
 
     model = AutoAWQForCausalLM.from_quantized(
-            quant_path=model_dir,
-            max_new_tokens=shared.args.max_seq_len,
-            trust_remote_code=shared.args.trust_remote_code,
-            fuse_layers=not shared.args.no_inject_fused_attention,
-            max_memory=get_max_memory_dict(),
-            batch_size=1,
-            safetensors=any(model_dir.glob('*.safetensors')),
-            )
+        quant_path=model_dir,
+        max_new_tokens=shared.args.max_seq_len,
+        trust_remote_code=shared.args.trust_remote_code,
+        fuse_layers=not shared.args.no_inject_fused_attention,
+        max_memory=get_max_memory_dict(),
+        batch_size=1,
+        safetensors=any(model_dir.glob('*.safetensors')),
+    )
 
     return model
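One detail worth noting in this hunk is safetensors=any(model_dir.glob('*.safetensors')): the loader only requests safetensors when the model directory actually contains such files. A small sketch of that detection on its own (the directory name is made up for illustration):

from pathlib import Path

model_dir = Path('models/example-awq-model')  # hypothetical path

# Path.glob returns a lazy generator, and any() stops at the first match,
# so this is a cheap "does at least one *.safetensors file exist?" check.
use_safetensors = any(model_dir.glob('*.safetensors'))
print(use_safetensors)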
@@ -60,7 +60,7 @@ def create_ui():
                 shared.gradio['send-chat-to-default'] = gr.Button('Send to default')
                 shared.gradio['send-chat-to-notebook'] = gr.Button('Send to notebook')
 
-    with gr.Row(elem_id='past-chats-row', elem_classes=['pretty_scrollbar']):
+    with gr.Row(elem_id='past-chats-row', elem_classes=['pretty_scrollbar']):
         with gr.Column():
             with gr.Row():
                 shared.gradio['unique_id'] = gr.Dropdown(label='Past chats', elem_classes=['slim-dropdown'], interactive=not mu)
@@ -246,7 +246,6 @@ def create_event_handlers():
 
     shared.gradio['delete_chat'].click(lambda: [gr.update(visible=True), gr.update(visible=False), gr.update(visible=True)], None, gradio(clear_arr))
     shared.gradio['delete_chat-cancel'].click(lambda: [gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)], None, gradio(clear_arr))
-
     shared.gradio['delete_chat-confirm'].click(
         ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
         lambda x, y: str(chat.find_all_histories(x).index(y)), gradio('interface_state', 'unique_id'), gradio('temporary_text')).then(
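The deletion flow here is the standard Gradio confirm/cancel pattern: clicking delete swaps button visibility with gr.update(visible=...), and the confirm handler chains follow-up steps with .then(). A stripped-down sketch of that pattern, assuming nothing about the webui beyond what the hunk shows (labels and the placeholder do_delete function are invented for illustration):

import gradio as gr

def do_delete():
    # Placeholder for the real deletion logic.
    print('deleted')

with gr.Blocks() as demo:
    delete = gr.Button('Delete chat')
    confirm = gr.Button('Confirm', visible=False)
    cancel = gr.Button('Cancel', visible=False)

    buttons = [delete, confirm, cancel]

    # Show the confirm/cancel pair, hide the original button.
    delete.click(lambda: [gr.update(visible=False), gr.update(visible=True), gr.update(visible=True)], None, buttons)
    # Cancel restores the original state.
    cancel.click(lambda: [gr.update(visible=True), gr.update(visible=False), gr.update(visible=False)], None, buttons)
    # Confirm runs the deletion, then restores the buttons, mirroring the chained .then() calls above.
    confirm.click(do_delete, None, None).then(
        lambda: [gr.update(visible=True), gr.update(visible=False), gr.update(visible=False)], None, buttons)

demo.launch()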