Bioassay Data Analysis - Analyze bioassay data: PubChem assay summary, ChEMBL activity search, compound properties, and target info. Use this skill for bioassay science tasks involving `get_assay_summary_by_cid`, `search_activity`, `calculate_mol_basic_info`, and `get_target_by_name`. Combines 4 tools from 3 SCP server(s).
Discipline: Bioassay Science | Tools Used: 4 | Servers: 3
Analyze bioassay data: PubChem assay summary, ChEMBL activity search, compound properties, and target info.
Tools and endpoints:
- get_assay_summary_by_cid from pubchem-server (streamable-http) - https://scp.intern-ai.org.cn/api/v1/mcp/8/Origene-PubChem
- search_activity from chembl-server (streamable-http) - https://scp.intern-ai.org.cn/api/v1/mcp/4/Origene-ChEMBL
- calculate_mol_basic_info from server-2 (streamable-http) - https://scp.intern-ai.org.cn/api/v1/mcp/2/DrugSDA-Tool
- get_target_by_name from chembl-server (streamable-http) - https://scp.intern-ai.org.cn/api/v1/mcp/4/Origene-ChEMBL

Example arguments:
{
  "cid": 2244,
  "target": "cyclooxygenase"
}
Note: Replace `<YOUR_SCP_HUB_API_KEY>` with your own SCP Hub API key. You can obtain one from the SCP Platform.
import asyncio
import json
from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client
from mcp.client.sse import sse_client
# SCP Hub endpoints, keyed by the server alias used throughout the workflow.
SERVERS = dict(
    [
        ("pubchem-server", "https://scp.intern-ai.org.cn/api/v1/mcp/8/Origene-PubChem"),
        ("chembl-server", "https://scp.intern-ai.org.cn/api/v1/mcp/4/Origene-ChEMBL"),
        ("server-2", "https://scp.intern-ai.org.cn/api/v1/mcp/2/DrugSDA-Tool"),
    ]
)
async def connect(url, transport_type):
    """Open an MCP client session against *url* and return it with its contexts.

    Args:
        url: SCP Hub endpoint for the server.
        transport_type: "sse" selects the SSE client; any other value
            (e.g. "streamable-http") selects the streamable-HTTP client.
            (Bug fix: the original ignored this parameter entirely.)

    Returns:
        (session, session_ctx, transport_ctx). The caller owns both context
        managers and must call ``__aexit__`` on each to release the connection.
    """
    headers = {"SCP-HUB-API-KEY": "<YOUR_SCP_HUB_API_KEY>"}
    if transport_type == "sse":
        transport = sse_client(url=url, headers=headers)
        # sse_client yields a (read, write) pair.
        read, write = await transport.__aenter__()
    else:
        transport = streamablehttp_client(url=url, headers=headers)
        # streamablehttp_client yields (read, write, get_session_id).
        read, write, _ = await transport.__aenter__()
    ctx = ClientSession(read, write)
    session = await ctx.__aenter__()
    await session.initialize()
    return session, ctx, transport
def parse(result):
    """Best-effort extraction of an MCP tool-call result.

    Returns the first content item's text decoded as JSON when possible,
    the raw text when it is not valid JSON, and ``str(result)`` when the
    result has no usable content.

    (Bug fix: the original used two bare ``except:`` clauses, which also
    swallow KeyboardInterrupt/SystemExit; narrowed to the exceptions that
    can actually occur here.)
    """
    content = getattr(result, "content", None)
    if content:
        text = getattr(content[0], "text", None)
        if text is not None:
            try:
                return json.loads(text)
            except (json.JSONDecodeError, TypeError):
                # Not JSON — hand back the raw text unchanged.
                return text
    return str(result)
async def main():
    """Run the bioassay workflow: assay summary, activity search,
    compound properties, and target lookup.

    Bug fixes vs. the original:
      * every ``call_tool`` was invoked with ``arguments={}`` even though the
        skill specifies example inputs (cid 2244, target "cyclooxygenase");
      * the session/transport context managers returned by ``connect`` were
        discarded, so the "# Cleanup" step had nothing to close — they are
        now kept and exited in a ``finally`` block;
      * the four copy-pasted step stanzas are collapsed into one loop.
    """
    # Example inputs from the skill description (cid 2244 = aspirin).
    cid = 2244
    target = "cyclooxygenase"

    sessions = {}
    contexts = []  # (session_ctx, transport_ctx) pairs, in creation order
    try:
        # Connect to required servers.
        for name, url in SERVERS.items():
            session, ctx, transport = await connect(url, "streamable-http")
            sessions[name] = session
            contexts.append((ctx, transport))

        # Execute workflow steps.
        # NOTE(review): argument key names below are inferred from the tool
        # names and the example payload — confirm against each tool's schema.
        steps = [
            ("Step 1", "pubchem-server", "get_assay_summary_by_cid", {"cid": cid}),
            ("Step 2", "chembl-server", "search_activity", {"target": target}),
            ("Step 3", "server-2", "calculate_mol_basic_info", {"cid": cid}),
            ("Step 4", "chembl-server", "get_target_by_name", {"name": target}),
        ]
        for label, server, tool, args in steps:
            result = await sessions[server].call_tool(tool, arguments=args)
            data = parse(result)
            print(f"{label} result: {json.dumps(data, indent=2, ensure_ascii=False)[:500]}")
    finally:
        # Cleanup: close sessions and transports in reverse creation order.
        for ctx, transport in reversed(contexts):
            await ctx.__aexit__(None, None, None)
            await transport.__aexit__(None, None, None)
    print("Workflow complete!")
# Script entry point: run the full async workflow once.
if __name__ == "__main__":
    asyncio.run(main())