Skip to content

Commit f8f40ef

Browse files
committed
Lint
1 parent 16ef08c commit f8f40ef

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

41 files changed

+300
-347
lines changed

examples/gradio_chat/local.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,7 @@
1+
import gradio as gr
12
import llama_cpp
23
import llama_cpp.llama_tokenizer
34

4-
import gradio as gr
5-
65
llama = llama_cpp.Llama.from_pretrained(
76
repo_id="Qwen/Qwen1.5-0.5B-Chat-GGUF",
87
filename="*q8_0.gguf",

examples/gradio_chat/server.py

-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import gradio as gr
2-
32
from openai import OpenAI
43

54
client = OpenAI(base_url="http://localhost:8000/v1", api_key="llama.cpp")

examples/hf_pull/main.py

-1
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
import llama_cpp
22
import llama_cpp.llama_tokenizer
33

4-
54
llama = llama_cpp.Llama.from_pretrained(
65
repo_id="Qwen/Qwen1.5-0.5B-Chat-GGUF",
76
filename="*q8_0.gguf",

examples/high_level_api/fastapi_server.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,8 @@
2626
"""
2727

2828
import os
29-
import uvicorn
3029

30+
import uvicorn
3131
from llama_cpp.server.app import create_app
3232

3333
if __name__ == "__main__":

examples/high_level_api/high_level_api_inference.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
import json
21
import argparse
2+
import json
33

44
from llama_cpp import Llama
55

examples/high_level_api/high_level_api_streaming.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
import json
21
import argparse
2+
import json
33

44
from llama_cpp import Llama
55

examples/high_level_api/langchain_custom_llm.py

+3-4
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,8 @@
11
import argparse
2-
3-
from llama_cpp import Llama
2+
from typing import Any, List, Mapping, Optional
43

54
from langchain.llms.base import LLM
6-
from typing import Optional, List, Mapping, Any
5+
from llama_cpp import Llama
76

87

98
class LlamaLLM(LLM):
@@ -41,9 +40,9 @@ def _identifying_params(self) -> Mapping[str, Any]:
4140
)
4241
print(f"Answer: {answer.strip()}")
4342

43+
from langchain.chains import LLMChain
4444
# Using in a chain
4545
from langchain.prompts import PromptTemplate
46-
from langchain.chains import LLMChain
4746

4847
prompt = PromptTemplate(
4948
input_variables=["product"],

examples/low_level_api/Chat.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
11
#!/bin/python
2-
import sys, os, datetime
2+
import datetime
3+
import os
4+
import sys
5+
36
from common import GptParams
47
from low_level_api_chat_cpp import LLaMAInteract
58

examples/low_level_api/Miku.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
#!/bin/python
2-
import sys, os
2+
import os
3+
import sys
4+
35
from common import GptParams
46
from low_level_api_chat_cpp import LLaMAInteract
57

examples/low_level_api/ReasonAct.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
11
#!/bin/python
2-
import sys, os, datetime
2+
import datetime
3+
import os
4+
import sys
5+
36
from common import GptParams
47
from low_level_api_chat_cpp import LLaMAInteract
58

examples/low_level_api/common.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
1-
import os
21
import argparse
2+
import os
33
import re
4-
54
from dataclasses import dataclass, field
65
from typing import List
76

examples/low_level_api/low_level_api_chat_cpp.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -13,12 +13,12 @@
1313

1414
import ctypes
1515
import sys
16-
from time import time
1716
from os import cpu_count, path
17+
from time import time
1818

1919
import llama_cpp
20-
from common import GptParams, gpt_params_parse, gpt_random_prompt
2120
import util
21+
from common import GptParams, gpt_params_parse, gpt_random_prompt
2222

2323

2424
# A LLaMA interactive session

examples/low_level_api/low_level_api_llama_cpp.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import ctypes
2-
import os
32
import multiprocessing
3+
import os
44

55
import llama_cpp
66

examples/low_level_api/quantize.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1-
import os
21
import argparse
2+
import os
3+
34
import llama_cpp
45

56

examples/ray/llm.py

+4-2
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
1-
from starlette.requests import Request
21
from typing import Dict
2+
3+
from llama_cpp import Llama
4+
from starlette.requests import Request
5+
36
from ray import serve
47
from ray.serve import Application
5-
from llama_cpp import Llama
68

79

810
@serve.deployment

llama_cpp/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from .llama_cpp import *
21
from .llama import *
2+
from .llama_cpp import *
33

44
__version__ = "0.3.5"

llama_cpp/_ctypes_extensions.py

+6-6
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,21 @@
11
from __future__ import annotations
22

3-
import sys
4-
import os
53
import ctypes
64
import functools
5+
import os
76
import pathlib
8-
7+
import sys
98
from typing import (
9+
TYPE_CHECKING,
1010
Any,
1111
Callable,
12+
Generic,
1213
List,
13-
Union,
1414
Optional,
15-
TYPE_CHECKING,
1615
TypeVar,
17-
Generic,
16+
Union
1817
)
18+
1919
from typing_extensions import TypeAlias
2020

2121

llama_cpp/_ggml.py

-1
Original file line numberDiff line numberDiff line change
@@ -9,4 +9,3 @@
99

1010
libggml_base_path = pathlib.Path(os.path.abspath(os.path.dirname(__file__))) / "lib"
1111
libggml = ctypes_ext.load_shared_library("ggml", libggml_base_path)
12-

llama_cpp/_internals.py

+9-16
Original file line numberDiff line numberDiff line change
@@ -1,27 +1,19 @@
11
from __future__ import annotations
22

3-
import os
43
import ctypes
5-
6-
from typing import (
7-
Dict,
8-
List,
9-
Tuple,
10-
Optional,
11-
Sequence,
12-
)
13-
from dataclasses import dataclass, field
4+
import os
145
from contextlib import ExitStack
6+
from dataclasses import dataclass, field
7+
from typing import Dict, List, Optional, Sequence, Tuple
158

169
import numpy as np
1710
import numpy.typing as npt
1811

19-
from .llama_types import *
20-
from .llama_grammar import LlamaGrammar
21-
from ._utils import suppress_stdout_stderr
22-
2312
import llama_cpp.llama_cpp as llama_cpp
2413

14+
from ._utils import suppress_stdout_stderr
15+
from .llama_grammar import LlamaGrammar
16+
from .llama_types import *
2517

2618
# Python wrappers over llama.h structs
2719

@@ -631,7 +623,7 @@ def sample(
631623
if len(self.prev) > 0:
632624
nl_token = ctx_main.model.token_nl()
633625
nl_logit = logits_array[nl_token]
634-
last_tokens = self.prev[-self.params.penalty_last_n :]
626+
last_tokens = self.prev[-self.params.penalty_last_n:]
635627
last_tokens_size = min(len(last_tokens), self.params.penalty_last_n)
636628
if last_tokens_size > 0:
637629
last_tokens_p = (llama_cpp.llama_token * len(last_tokens))(*last_tokens)
@@ -697,8 +689,9 @@ def accept(self, ctx_main: LlamaContext, id: int, apply_grammar: bool):
697689
self.prev.append(id)
698690

699691

700-
from typing import List, Callable, Optional, Union
701692
import ctypes
693+
from typing import Callable, List, Optional, Union
694+
702695
import llama_cpp
703696

704697

llama_cpp/_logger.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1-
import sys
21
import ctypes
32
import logging
3+
import sys
44

55
import llama_cpp
66

@@ -26,6 +26,8 @@
2626
_last_log_level = GGML_LOG_LEVEL_TO_LOGGING_LEVEL[0]
2727

2828
# typedef void (*ggml_log_callback)(enum ggml_log_level level, const char * text, void * user_data);
29+
30+
2931
@llama_cpp.llama_log_callback
3032
def llama_log_callback(
3133
level: int,

llama_cpp/_utils.py

-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
import os
22
import sys
3-
43
from typing import Any, Dict
54

65
# Avoid "LookupError: unknown encoding: ascii" when open() called in a destructor

0 commit comments

Comments
 (0)