Skip to content

Commit

Permalink
fix bug: check inference-service status against 'Ready' instead of 'Running', improve hub-token verification log messages, launch deploy via m.start, and move milvus-lite to a macOS-only test requirements file
Browse files Browse the repository at this point in the history
  • Loading branch information
wzh1994 committed Jan 3, 2025
1 parent 039bff0 commit 950567c
Show file tree
Hide file tree
Showing 8 changed files with 15 additions and 9 deletions.
1 change: 1 addition & 0 deletions .github/workflows/macOS_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,7 @@ jobs:
git clone https://[email protected]/LazyAGI/LazyLLM-Data.git /tmp/lazyllm/data
brew install ffmpeg
pip install -r tests/requirements.txt
pip install -r tests/requirements_mac.txt
export LAZYLLM_DATA_PATH=/tmp/lazyllm/data
python -m pytest -v --reruns=2 tests/basic_tests/
timeout-minutes: 30
Expand Down
2 changes: 1 addition & 1 deletion lazyllm/components/text_to_speech/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,5 +40,5 @@ def __call__(self, finetuned_model=None, base_model=None):
LOG.warning(f"Note! That finetuned_model({finetuned_model}) is an invalid path, "
f"base_model({base_model}) will be used")
finetuned_model = base_model
return lazyllm.deploy.RelayServer(func=TTSBase.func(finetuned_model), launcher=self.launcher,
return lazyllm.deploy.RelayServer(func=self.__class__.func(finetuned_model), launcher=self.launcher,
log_path=self._log_path, cls='tts')()
8 changes: 4 additions & 4 deletions lazyllm/components/utils/downloader/model_downloader.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,8 +281,8 @@ def _verify_hub_token(self, token):
try:
api.whoami(token)
return True
except Exception as e:
lazyllm.LOG.warning('Verify failed: ', e)
except Exception:
if token: lazyllm.LOG.warning(f'Huggingface token {token} verified failed')
return False

@_envs_manager
Expand Down Expand Up @@ -334,8 +334,8 @@ def _verify_hub_token(self, token):
try:
api.login(token)
return True
except Exception as e:
lazyllm.LOG.warning('Verify failed: ', e)
except Exception:
if token: lazyllm.LOG.warning(f'Modelscope token {token} verified failed')
return False

def verify_model_id(self, model_id):
Expand Down
7 changes: 5 additions & 2 deletions lazyllm/tools/infer_service/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,10 @@ def get_infra_handle(self, token, job_id):
response.raise_for_status()
response = response.json()
base_model, url, deploy_method = response['base_model'], response['url'], response['deploy_method']
if self.uniform_status(response['status']) != 'Running':
lazyllm.LOG.warning(base_model)
lazyllm.LOG.warning(url)
lazyllm.LOG.warning(deploy_method)
if self.uniform_status(response['status']) != 'Ready':
raise RuntimeError(f'Job {job_id} is not running now')
if not (deployer := getattr(lazyllm.deploy, deploy_method, None)):
deployer = type(lazyllm.deploy.auto(base_model))
Expand All @@ -144,7 +147,7 @@ def get_status():
return self.uniform_status(response['status'])

n = 0
while (status := get_status()) != 'Running':
while (status := get_status()) != 'Ready':
if status in ('Invalid', 'Cancelled', 'Failed'):
raise RuntimeError(f'Deploy service failed. status is {status}')
if n > timeout: raise TimeoutError('Inference service has not started after 1800 seconds.')
Expand Down
2 changes: 1 addition & 1 deletion lazyllm/tools/infer_service/serve.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ async def create_job(self, job: JobDescription, token: str = Header(None)):
m = lazyllm.TrainableModule(job.deploy_model).deploy_method((lazyllm.deploy.auto, hypram))

# Launch Deploy:
thread = threading.Thread(target=m.update_server)
thread = threading.Thread(target=m.start)
thread.start()

# Sleep 5s for launch cmd.
Expand Down
2 changes: 2 additions & 0 deletions lazyllm/tools/services/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ def __init__(self, url):
def uniform_status(self, status):
if status == 'Invalid':
res = 'Invalid'
elif status == 'Ready':
res = 'Ready'
elif Status[status] == Status.Done:
res = 'Done'
elif Status[status] == Status.Cancelled:
Expand Down
1 change: 0 additions & 1 deletion tests/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ olefile
pytest-rerunfailures
pytest-order
pymilvus==2.4.10
milvus-lite==2.4.10
openpyxl
nbconvert
python-pptx
Expand Down
1 change: 1 addition & 0 deletions tests/requirements_mac.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
milvus-lite==2.4.10

0 comments on commit 950567c

Please sign in to comment.