Skip to content

Commit

Permalink
Merge pull request #689 from shenchucheng/fix-bugs-scc
Browse the repository at this point in the history
Fix some bugs
  • Loading branch information
geekan authored Jan 5, 2024
2 parents a3b5ca9 + 9ce0182 commit 136b3f5
Show file tree
Hide file tree
Showing 6 changed files with 15 additions and 6 deletions.
1 change: 1 addition & 0 deletions metagpt/provider/google_gemini_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,7 @@ async def _achat_completion_stream(self, messages: list[dict]) -> str:
content = chunk.text
log_llm_stream(content)
collected_content.append(content)
log_llm_stream("\n")

full_content = "".join(collected_content)
usage = await self.aget_usage(messages, full_content)
Expand Down
1 change: 1 addition & 0 deletions metagpt/provider/ollama_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,7 @@ async def _achat_completion_stream(self, messages: list[dict]) -> str:
else:
# stream finished
usage = self.get_usage(chunk)
log_llm_stream("\n")

self._update_costs(usage)
full_content = "".join(collected_content)
Expand Down
1 change: 1 addition & 0 deletions metagpt/provider/openai_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,7 @@ async def acompletion_text(self, messages: list[dict], stream=False, timeout=3)
async for i in resp:
log_llm_stream(i)
collected_messages.append(i)
log_llm_stream("\n")

full_reply_content = "".join(collected_messages)
usage = self._calc_usage(messages, full_reply_content)
Expand Down
1 change: 1 addition & 0 deletions metagpt/provider/zhipuai_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ async def _achat_completion_stream(self, messages: list[dict], timeout=3) -> str
usage = meta.get("usage")
else:
print(f"zhipuapi else event: {event.data}", end="")
log_llm_stream("\n")

self._update_costs(usage)
full_content = "".join(collected_content)
Expand Down
9 changes: 6 additions & 3 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ def llm_api():
logger.info("Tearing down the test")


@pytest.fixture(scope="session")
@pytest.fixture
def proxy():
pattern = re.compile(
rb"(?P<method>[a-zA-Z]+) (?P<uri>(\w+://)?(?P<host>[^\s\'\"<>\[\]{}|/:]+)(:(?P<port>\d+))?[^\s\'\"<>\[\]{}|]*) "
Expand All @@ -136,8 +136,11 @@ async def handle_client(reader, writer):
remote_writer.write(data)
await asyncio.gather(pipe(reader, remote_writer), pipe(remote_reader, writer))

server = asyncio.get_event_loop().run_until_complete(asyncio.start_server(handle_client, "127.0.0.1", 0))
return "http://{}:{}".format(*server.sockets[0].getsockname())
async def proxy_func():
server = await asyncio.start_server(handle_client, "127.0.0.1", 0)
return server, "http://{}:{}".format(*server.sockets[0].getsockname())

return proxy_func()


# see https://github.com/Delgan/loguru/issues/59#issuecomment-466591978
Expand Down
8 changes: 5 additions & 3 deletions tests/metagpt/tools/test_web_browser_engine_playwright.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,16 +13,17 @@
@pytest.mark.parametrize(
"browser_type, use_proxy, kwagrs, url, urls",
[
("chromium", {"proxy": True}, {}, "https://deepwisdom.ai", ("https://deepwisdom.ai",)),
("firefox", {}, {"ignore_https_errors": True}, "https://deepwisdom.ai", ("https://deepwisdom.ai",)),
("webkit", {}, {"ignore_https_errors": True}, "https://deepwisdom.ai", ("https://deepwisdom.ai",)),
("chromium", {"proxy": True}, {}, "https://www.deepwisdom.ai", ("https://www.deepwisdom.ai",)),
("firefox", {}, {"ignore_https_errors": True}, "https://www.deepwisdom.ai", ("https://www.deepwisdom.ai",)),
("webkit", {}, {"ignore_https_errors": True}, "https://www.deepwisdom.ai", ("https://www.deepwisdom.ai",)),
],
ids=["chromium-normal", "firefox-normal", "webkit-normal"],
)
async def test_scrape_web_page(browser_type, use_proxy, kwagrs, url, urls, proxy, capfd):
global_proxy = CONFIG.global_proxy
try:
if use_proxy:
server, proxy = await proxy
CONFIG.global_proxy = proxy
browser = web_browser_engine_playwright.PlaywrightWrapper(browser_type=browser_type, **kwagrs)
result = await browser.run(url)
Expand All @@ -35,6 +36,7 @@ async def test_scrape_web_page(browser_type, use_proxy, kwagrs, url, urls, proxy
assert len(results) == len(urls) + 1
assert all(("MetaGPT" in i.inner_text) for i in results)
if use_proxy:
server.close()
assert "Proxy:" in capfd.readouterr().out
finally:
CONFIG.global_proxy = global_proxy
Expand Down

0 comments on commit 136b3f5

Please sign in to comment.