feat: workflows for formatting/linting (#35)

* add github workflows for pylint and yapf

* yapf

* docstrings for auth

* fix auth.py

* fix generators.py

* fix gen_logging.py

* fix main.py

* fix model.py

* fix templating.py

* fix utils.py

* update formatting.sh to include subdirs for pylint

* fix model_test.py

* fix wheel_test.py

* rename utils to utils_oai

* fix OAI/utils_oai.py

* fix completion.py

* fix token.py

* fix lora.py

* fix common.py

* add pylintrc and fix model.py

* finish up pylint

* fix attribute error

* main.py formatting

* add formatting batch script

* Main: Remove unnecessary global

Linter suggestion.

Signed-off-by: kingbri <bdashore3@proton.me>

* switch to ruff

* Formatting + Linting: Add ruff.toml

Signed-off-by: kingbri <bdashore3@proton.me>

* Formatting + Linting: Switch scripts to use ruff

Also remove the file and recent file change functions from both
scripts.

Signed-off-by: kingbri <bdashore3@proton.me>

* Tree: Format and lint

Signed-off-by: kingbri <bdashore3@proton.me>

* Scripts + Workflows: Format

Signed-off-by: kingbri <bdashore3@proton.me>

* Tree: Remove pylint flags

We use ruff now

Signed-off-by: kingbri <bdashore3@proton.me>

* Tree: Format

Signed-off-by: kingbri <bdashore3@proton.me>

* Formatting: Line length is 88

Use the same value as Black.

Signed-off-by: kingbri <bdashore3@proton.me>

* Tree: Format

Update to new line length rules.

Signed-off-by: kingbri <bdashore3@proton.me>

---------

Authored-by: AlpinDale <52078762+AlpinDale@users.noreply.github.com>
Co-authored-by: kingbri <bdashore3@proton.me>
This commit is contained in:
AlpinDale
2023-12-22 16:20:35 +00:00
committed by GitHub
parent a14abfe21c
commit fa47f51f85
22 changed files with 1210 additions and 511 deletions

View File

@@ -1,22 +1,49 @@
""" Test the model container. """
from model import ModelContainer
def progress(module, modules):
    """Forward a load-progress update as a generator.

    Handed to ``ModelContainer.load_gen`` as its progress callback;
    it simply yields the ``(module, modules)`` pair it was given.
    """
    yield (module, modules)
# NOTE(review): the lines below appear to be removed (pre-refactor) lines from
# the original flat script, left over from the diff view; indentation was lost
# in extraction, so the for-loop bodies are not indented. They are superseded
# by the test_* functions that follow — verify against the actual file before
# relying on them.
container = ModelContainer("/mnt/str/models/_exl2/mistral-7b-instruct-exl2/4.0bpw/")
loader = container.load_gen(progress)
for (module, modules) in loader:
print(module, modules)
generator = container.generate_gen("Once upon a tim", token_healing = True)
for g in generator:
print(g, end = "")
def test_load_gen(model_path):
    """Smoke-test streaming model load via ``ModelContainer.load_gen``.

    Prints each progress pair reported through the ``progress``
    callback, then unloads and drops the container.
    """
    mc = ModelContainer(model_path)
    for loaded, total in mc.load_gen(progress):
        print(loaded, total)
    mc.unload()
    del mc
# NOTE(review): removed-line residue from the diff view — a duplicate
# unload/teardown pair plus the old flat-script load of the 4.65bpw model.
# Superseded by test_generate below; confirm against the real file.
container.unload()
del container
mc = ModelContainer("/mnt/str/models/_exl2/mistral-7b-instruct-exl2/4.65bpw/")
mc.load(progress)
def test_generate_gen(model_path):
    """Smoke-test streaming generation via ``ModelContainer.generate_gen``.

    Streams chunks for a fixed prompt (truncated on purpose so token
    healing has something to repair) and prints them without newlines,
    then unloads and drops the container.
    """
    mc = ModelContainer(model_path)
    stream = mc.generate_gen("Once upon a tim", token_healing=True)
    for piece in stream:
        print(piece, end="")
    mc.unload()
    del mc
# NOTE(review): removed-line residue from the diff view — the old flat-script
# blocking generate call, replaced by test_generate below. Verify against the
# actual file.
response = mc.generate("All work and no play makes turbo a derpy cat.\nAll work and no play makes turbo a derpy cat.\nAll", top_k = 1, max_new_tokens = 1000, stream_interval = 0.5)
print (response)
def test_generate(model_path):
    """Smoke-test blocking (non-streaming) generation.

    Loads the model with the ``progress`` callback, generates a
    completion for a fixed repeated prompt, and prints the result.
    """
    mc = ModelContainer(model_path)
    mc.load(progress)
    text = (
        "All work and no play makes turbo a derpy cat.\n"
        "All work and no play makes turbo a derpy cat.\nAll"
    )
    result = mc.generate(
        text, top_k=1, max_new_tokens=1000, stream_interval=0.5
    )
    print(result)
if __name__ == "__main__":
    # Local exl2 quantizations used as smoke-test fixtures; the streaming
    # tests share the 4.0bpw model, the blocking test uses 4.65bpw.
    PATH_4_0BPW = "/mnt/str/models/_exl2/mistral-7b-instruct-exl2/4.0bpw/"
    PATH_4_65BPW = "/mnt/str/models/_exl2/mistral-7b-instruct-exl2/4.65bpw/"

    test_load_gen(PATH_4_0BPW)
    test_generate_gen(PATH_4_0BPW)
    test_generate(PATH_4_65BPW)

View File

@@ -1,3 +1,4 @@
""" Test if the wheels are installed correctly. """
from importlib.metadata import version
from importlib.util import find_spec
@@ -34,8 +35,12 @@ else:
print(
f"\nSuccessful imports: {', '.join(successful_packages)}",
f"\nErrored imports: {''.join(errored_packages)}"
f"\nErrored imports: {''.join(errored_packages)}",
)
if len(errored_packages) > 0:
print("\nIf packages are installed, but not found on this test, please check the wheel versions for the correct python version and CUDA version (if applicable).")
print(
"\nIf packages are installed, but not found on this test, please "
"check the wheel versions for the correct python version and CUDA "
"version (if applicable)."
)