Skip to content

Commit

Permalink
test
Browse files Browse the repository at this point in the history
  • Loading branch information
xyang16 committed Nov 15, 2024
1 parent 7573a19 commit 04b8ee5
Showing 1 changed file with 45 additions and 45 deletions.
90 changes: 45 additions & 45 deletions tests/integration/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -694,51 +694,51 @@ def test_lora_phi2(self):
@pytest.mark.gpu_4
class TestLmiDistLora:

def test_lora_llama2_7b(self):
    """Smoke-test LoRA adapter serving for the unmerged llama-7b model.

    Builds the model config, launches the lmi runner, then drives the
    lmi_dist adapter client checks against it.
    """
    model = "llama-7b-unmerged-lora"
    with Runner('lmi', model) as runner:
        prepare.build_lmi_dist_model(model)
        runner.launch()
        client.run(f"lmi_dist_adapters {model}".split())

def test_lora_llama2_7b_overflow(self):
    """Smoke-test unmerged llama-7b LoRA serving in the adapter-overflow
    configuration (more adapters than fit — see model config elsewhere).

    Builds the model config, launches the lmi runner, then drives the
    lmi_dist adapter client checks against it.
    """
    model = "llama-7b-unmerged-lora-overflow"
    with Runner('lmi', model) as runner:
        prepare.build_lmi_dist_model(model)
        runner.launch()
        client.run(f"lmi_dist_adapters {model}".split())

def test_lora_llama2_13b_awq(self):
    """Smoke-test unmerged LoRA serving on the AWQ-quantized llama2-13b.

    Builds the model config, launches the lmi runner, then drives the
    lmi_dist adapter client checks against it.
    """
    model = "llama2-13b-awq-unmerged-lora"
    with Runner('lmi', model) as runner:
        prepare.build_lmi_dist_model(model)
        runner.launch()
        client.run(f"lmi_dist_adapters {model}".split())

def test_lora_mistral_7b(self):
    """Smoke-test LoRA adapter serving for the unmerged mistral-7b model.

    Builds the model config, launches the lmi runner, then drives the
    lmi_dist adapter client checks against it.
    """
    model = "mistral-7b-unmerged-lora"
    with Runner('lmi', model) as runner:
        prepare.build_lmi_dist_model(model)
        runner.launch()
        client.run(f"lmi_dist_adapters {model}".split())

def test_lora_mistral_7b_awq(self):
    """Smoke-test unmerged LoRA serving on the AWQ-quantized mistral-7b.

    Builds the model config, launches the lmi runner, then drives the
    lmi_dist adapter client checks against it.
    """
    model = "mistral-7b-awq-unmerged-lora"
    with Runner('lmi', model) as runner:
        prepare.build_lmi_dist_model(model)
        runner.launch()
        client.run(f"lmi_dist_adapters {model}".split())

def test_lora_mistral_7b_gptq(self):
    """Smoke-test unmerged LoRA serving on the GPTQ-quantized mistral-7b.

    Builds the model config, launches the lmi runner, then drives the
    lmi_dist adapter client checks against it.
    """
    model = "mistral-7b-gptq-unmerged-lora"
    with Runner('lmi', model) as runner:
        prepare.build_lmi_dist_model(model)
        runner.launch()
        client.run(f"lmi_dist_adapters {model}".split())

def test_lora_llama3_8b(self):
    """Smoke-test LoRA adapter serving for the unmerged llama3-8b model.

    Builds the model config, launches the lmi runner, then drives the
    lmi_dist adapter client checks against it.
    """
    model = "llama3-8b-unmerged-lora"
    with Runner('lmi', model) as runner:
        prepare.build_lmi_dist_model(model)
        runner.launch()
        client.run(f"lmi_dist_adapters {model}".split())
# NOTE(review): the block below duplicates the active tests above as
# commented-out code. Prefer deleting it outright (version control preserves
# history) or disabling the tests explicitly with
# @pytest.mark.skip(reason="...") so the intent is visible to pytest.
# def test_lora_llama2_7b(self):
# with Runner('lmi', 'llama-7b-unmerged-lora') as r:
# prepare.build_lmi_dist_model("llama-7b-unmerged-lora")
# r.launch()
# client.run("lmi_dist_adapters llama-7b-unmerged-lora".split())
#
# def test_lora_llama2_7b_overflow(self):
# with Runner('lmi', 'llama-7b-unmerged-lora-overflow') as r:
# prepare.build_lmi_dist_model("llama-7b-unmerged-lora-overflow")
# r.launch()
# client.run(
# "lmi_dist_adapters llama-7b-unmerged-lora-overflow".split())
#
# def test_lora_llama2_13b_awq(self):
# with Runner('lmi', 'llama2-13b-awq-unmerged-lora') as r:
# prepare.build_lmi_dist_model("llama2-13b-awq-unmerged-lora")
# r.launch()
# client.run(
# "lmi_dist_adapters llama2-13b-awq-unmerged-lora".split())
#
# def test_lora_mistral_7b(self):
# with Runner('lmi', 'mistral-7b-unmerged-lora') as r:
# prepare.build_lmi_dist_model("mistral-7b-unmerged-lora")
# r.launch()
# client.run("lmi_dist_adapters mistral-7b-unmerged-lora".split())
#
# def test_lora_mistral_7b_awq(self):
# with Runner('lmi', 'mistral-7b-awq-unmerged-lora') as r:
# prepare.build_lmi_dist_model("mistral-7b-awq-unmerged-lora")
# r.launch()
# client.run(
# "lmi_dist_adapters mistral-7b-awq-unmerged-lora".split())
#
# def test_lora_mistral_7b_gptq(self):
# with Runner('lmi', 'mistral-7b-gptq-unmerged-lora') as r:
# prepare.build_lmi_dist_model("mistral-7b-gptq-unmerged-lora")
# r.launch()
# client.run(
# "lmi_dist_adapters mistral-7b-gptq-unmerged-lora".split())
#
# def test_lora_llama3_8b(self):
# with Runner('lmi', 'llama3-8b-unmerged-lora') as r:
# prepare.build_lmi_dist_model("llama3-8b-unmerged-lora")
# r.launch()
# client.run("lmi_dist_adapters llama3-8b-unmerged-lora".split())

def test_lora_gemma_7b(self):
with Runner('lmi', 'gemma-7b-unmerged-lora') as r:
Expand Down

0 comments on commit 04b8ee5

Please sign in to comment.