Add initial project files for prototypes
parent b20e3ab72e
commit 39844a5785
@@ -0,0 +1,175 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore

# Logs

logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Caches

.cache

# Diagnostic reports (https://nodejs.org/api/report.html)

report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data

pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover

lib-cov

# Coverage directory used by tools like istanbul

coverage
*.lcov

# nyc test coverage

.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)

.grunt

# Bower dependency directory (https://bower.io/)

bower_components

# node-waf configuration

.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)

build/Release

# Dependency directories

node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)

web_modules/

# TypeScript cache

*.tsbuildinfo

# Optional npm cache directory

.npm

# Optional eslint cache

.eslintcache

# Optional stylelint cache

.stylelintcache

# Microbundle cache

.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history

.node_repl_history

# Output of 'npm pack'

*.tgz

# Yarn Integrity file

.yarn-integrity

# dotenv environment variable files

.env
.env.development.local
.env.test.local
.env.production.local
.env.local

# parcel-bundler cache (https://parceljs.org/)

.parcel-cache

# Next.js build output

.next
out

# Nuxt.js build / generate output

.nuxt
dist

# Gatsby files

# Comment in the public line if your project uses Gatsby and not Next.js

# https://nextjs.org/blog/next-9-1#public-directory-support

# public

# vuepress build output

.vuepress/dist

# vuepress v2.x temp and cache directory

.temp

# Docusaurus cache and generated files

.docusaurus

# Serverless directories

.serverless/

# FuseBox cache

.fusebox/

# DynamoDB Local files

.dynamodb/

# TernJS port file

.tern-port

# Stores VSCode versions used for testing VSCode extensions

.vscode-test

# yarn v2

.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

# IntelliJ based IDEs
.idea

# Finder (MacOS) folder config
.DS_Store
@@ -0,0 +1,15 @@
# exxetagpt

To install dependencies:

```bash
bun install
```

To run:

```bash
bun run index.ts
```

This project was created using `bun init` in bun v1.1.34. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.
@@ -0,0 +1,90 @@
from openai import AzureOpenAI
from dotenv import load_dotenv
import os
import pymupdf

load_dotenv()

BASE_URL = "https://ai.exxeta.com/api/v2/azure/openai"
API_KEY = os.getenv("API_KEY")

client = AzureOpenAI(
    api_key=API_KEY,
    api_version="2023-07-01-preview",
    base_url=BASE_URL
)
def extract_text_from_pdf(file_path):
    """Extract text content from a PDF file using PyMuPDF (fitz)."""
    all_text = ""
    # Open the PDF file
    doc = pymupdf.open(file_path)

    # Print number of pages
    print(f"PDF has {len(doc)} pages")

    # Extract and print text from each page
    for page_num in range(len(doc)):
        page = doc[page_num]
        text = page.get_text()

        # Print the page content
        print(text)

        all_text += "[Page " + str(page_num + 1) + "]\n" + text + "\n\n"

    return all_text


file_path = "../../pitch-books/Pitchbook 1.pdf"
pdf_text = extract_text_from_pdf(file_path)

response = client.chat.completions.create(
    messages=[
        {
            "role": "system",
            "content": "Always respond with a valid JSON object"
        },
        {
            "role": "user",
            "content": """Extract the values from the text. Leave values that are not found empty:
-Fondsname
-Fondsmanager
-Name Kapitalverwaltungsgesellschaft
-Datum
-Risikoprofil
-Artikel gem. SFDR
-Ziel
-Zielrendite über die Fondslaufzeit
-Rendite seit Auflage
-Zielausschüttungsrendite über die Fondslaufzeit
-Ausschüttungsrendite seit Auflage
-Laufzeit
-LTV (Loan-to-Value)
-Soll/Ist
-Ziel
-Managementgebühren bezogen auf NAV (Net Asset Value)
-Sektorenallokation
-Länderallokation
For each value return:
- the Key
- the Value
- the page where this value was found
- a confidence score indicating how confident the model is about the value (low, medium, high)

Here is the text:""" + pdf_text
        }
    ],
    model="gpt-4o-mini",
    response_format={"type": "json_object"}
    # temperature=0.7,
    # top_p=0.95,
    # frequency_penalty=0,
    # presence_penalty=0,
    # max_tokens=800,
    # stop="",
    # stream=False
)



print(response.choices[0].message.content)
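Because the request above sets `response_format={"type": "json_object"}`, the returned message content is a JSON string and can be parsed with the standard `json` module. A minimal post-processing sketch (not part of the commit); the nested `value`/`page`/`confidence` layout and the helper name `parse_extraction` are assumptions, since the prompt only asks for key, value, page, and confidence:

```python
import json


def parse_extraction(raw: str) -> None:
    """Pretty-print the JSON object returned by the extraction prompt.

    Assumes (not guaranteed) a mapping of field name to an object with
    "value", "page" and "confidence" entries.
    """
    data = json.loads(raw)  # raises json.JSONDecodeError if the model returned invalid JSON
    for key, entry in data.items():
        if isinstance(entry, dict):
            value = entry.get("value", "")
            page = entry.get("page", "")
            confidence = entry.get("confidence", "")
            print(f"{key}: {value!r} (page {page}, confidence {confidence})")
        else:
            # Fall back gracefully if the model returned a flat key/value map
            print(f"{key}: {entry!r}")


# Example usage with the response object from the script above:
# parse_extraction(response.choices[0].message.content)
```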
@@ -0,0 +1,52 @@
acres==0.3.0
annotated-types==0.7.0
anyio==4.9.0
certifi==2025.1.31
charset-normalizer==3.4.1
ci-info==0.3.0
click==8.1.8
configobj==5.0.9
configparser==7.2.0
distro==1.9.0
etelemetry==0.3.1
filelock==3.18.0
h11==0.14.0
httpcore==1.0.8
httplib2==0.22.0
httpx==0.28.1
idna==3.10
isodate==0.6.1
jiter==0.9.0
looseversion==1.3.0
lxml==5.4.0
networkx==3.4.2
nibabel==5.3.2
nipype==1.10.0
numpy==2.2.5
openai==1.75.0
packaging==25.0
pandas==2.2.3
pathlib==1.0.1
prov==2.0.1
puremagic==1.28
pydantic==2.11.3
pydantic_core==2.33.1
pydot==3.0.4
PyMuPDF==1.25.5
pyparsing==3.2.3
python-dateutil==2.9.0.post0
python-dotenv==1.1.0
pytz==2025.2
pyxnat==1.6.3
rdflib==6.3.2
requests==2.32.3
scipy==1.15.2
simplejson==3.20.1
six==1.17.0
sniffio==1.3.1
tqdm==4.67.1
traits==7.0.2
typing-inspection==0.4.0
typing_extensions==4.13.2
tzdata==2025.2
urllib3==2.4.0
@@ -0,0 +1,37 @@
# FastAgent secrets file
fastagent.secrets.yaml

# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Virtual Environment
.env
.venv
env/
venv/
ENV/

# IDE
.idea/
.vscode/
*.swp
*.swo
@@ -0,0 +1 @@
3.13
@@ -0,0 +1,17 @@
import asyncio
from mcp_agent.core.fastagent import FastAgent

# Create the application
fast = FastAgent("fast-agent example")


# Define the agent
@fast.agent(instruction="You are a helpful AI Agent")
async def main():
    # use the --model command line switch or agent arguments to change model
    async with fast.run() as agent:
        await agent.interactive()


if __name__ == "__main__":
    asyncio.run(main())
@@ -0,0 +1,45 @@
# FastAgent Configuration File

# Default Model Configuration:
#
# Takes format:
# <provider>.<model_string>.<reasoning_effort?> (e.g. anthropic.claude-3-5-sonnet-20241022 or openai.o3-mini.low)
# Accepts aliases for Anthropic Models: haiku, haiku3, sonnet, sonnet35, opus, opus3
# and OpenAI Models: gpt-4.1, gpt-4.1-mini, o1, o1-mini, o3-mini
#
# If not specified, defaults to "haiku".
# Can be overridden with a command line switch --model=<model>, or within the Agent constructor.

default_model: openai.gpt-4o-mini
openai:
  base_url: "https://ai.exxeta.com/api/v2/azure/openai" # Optional, only include to override
  # reasoning_effort: "medium" # Default reasoning effort: "low", "medium", or "high"
  api_version: "2023-05-15"

# Logging and Console Configuration:
logger:
  level: "info"
  # level: "debug" | "info" | "warning" | "error"
  # type: "none" | "console" | "file" | "http"
  # path: "/path/to/logfile.jsonl"

  # Switch the progress display on or off
  progress_display: true

  # Show chat User/Assistant messages on the console
  show_chat: true
  # Show tool calls on the console
  show_tools: true
  # Truncate long tool responses on the console
  truncate_tools: true

# MCP Servers
mcp:
  servers:
    fetch:
      command: "uvx"
      args: ["mcp-server-fetch"]
    filesystem:
      command: "npx"
      args: ["-y", "@modelcontextprotocol/server-filesystem", "."]
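For reference, the `fetch` and `filesystem` servers declared above are attached to an agent by name. A minimal sketch (not part of the commit), following the pattern in agent.py; the agent name, the prompt string, and the exact `servers=`/`agent.send()` keywords are assumptions against fast-agent-mcp 0.2.x:

```python
import asyncio
from mcp_agent.core.fastagent import FastAgent

fast = FastAgent("mcp servers example")


# "servers" is assumed to accept the names defined under mcp.servers
# in fastagent.config.yaml ("fetch" and "filesystem").
@fast.agent(
    name="fs_agent",  # hypothetical agent name
    instruction="You are a helpful AI Agent with access to fetch and the local filesystem",
    servers=["fetch", "filesystem"],
)
async def main():
    async with fast.run() as agent:
        # agent.send() is assumed; agent.interactive(), as used in agent.py, also works
        await agent.send("List the files in the current directory")


if __name__ == "__main__":
    asyncio.run(main())
```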
@@ -0,0 +1,17 @@
{"level":"ERROR","timestamp":"2025-04-27T12:31:28.710267","namespace":"mcp_agent.llm.providers.augmented_llm_openai.default","message":"Error: Error code: 404 - {'timestamp': '2025-04-27T10:31:28.770+00:00', 'path': '/api/v2/azure/openai/chat/completions', 'status': 404, 'error': 'Not Found', 'requestId': '93b54269-15746'}"}
{"level":"ERROR","timestamp":"2025-04-27T12:32:25.709293","namespace":"mcp_agent.llm.providers.augmented_llm_openai.default","message":"Error: Error code: 404 - {'timestamp': '2025-04-27T10:32:25.774+00:00', 'path': '/api/v2/azure/openai/chat/completions', 'status': 404, 'error': 'Not Found', 'requestId': '93b54269-15748'}"}
{"level":"ERROR","timestamp":"2025-04-27T12:32:32.209425","namespace":"mcp_agent.llm.providers.augmented_llm_openai.default","message":"Error: Error code: 404 - {'timestamp': '2025-04-27T10:32:32.235+00:00', 'path': '/api/v2/azure/openai/chat/completions', 'status': 404, 'error': 'Not Found', 'requestId': '93b54269-15749'}"}
{"level":"ERROR","timestamp":"2025-04-27T12:32:33.396324","namespace":"mcp_agent.llm.providers.augmented_llm_openai.default","message":"Error: Error code: 404 - {'timestamp': '2025-04-27T10:32:33.445+00:00', 'path': '/api/v2/azure/openai/chat/completions', 'status': 404, 'error': 'Not Found', 'requestId': '93b54269-15750'}"}
{"level":"ERROR","timestamp":"2025-04-27T12:33:14.581286","namespace":"mcp_agent.llm.providers.augmented_llm_openai.default","message":"Error: Error code: 404 - {'timestamp': '2025-04-27T10:33:14.644+00:00', 'path': '/api/v2/azure/openai/chat/completions', 'status': 404, 'error': 'Not Found', 'requestId': '93b54269-15752'}"}
{"level":"ERROR","timestamp":"2025-04-27T12:34:52.331047","namespace":"mcp_agent.llm.providers.augmented_llm_openai.default","message":"Error: Error code: 404 - {'timestamp': '2025-04-27T10:34:52.392+00:00', 'path': '/api/v2/azure/openai/chat/completions', 'status': 404, 'error': 'Not Found', 'requestId': '848fc34a-15755'}"}
{"level":"ERROR","timestamp":"2025-04-27T12:35:01.450602","namespace":"mcp_agent.llm.providers.augmented_llm_openai.default","message":"Error: Error code: 404 - {'timestamp': '2025-04-27T10:35:01.464+00:00', 'path': '/api/v2/azure/openai/chat/completions', 'status': 404, 'error': 'Not Found', 'requestId': '848fc34a-15756'}"}
{"level":"INFO","timestamp":"2025-04-27T12:37:33.316933","namespace":"mcp_agent.context","message":"Configuring logger with level: info"}
{"level":"INFO","timestamp":"2025-04-27T12:37:33.317593","namespace":"mcp_agent.fast-agent example","message":"MCPAgent initialized","data":{"data":{"progress_action":"Running","target":"fast-agent example","agent_name":"fast-agent example"}}}
{"level":"INFO","timestamp":"2025-04-27T12:37:33.317624","namespace":"mcp_agent.core.direct_factory","message":"Loaded default","data":{"data":{"progress_action":"Loaded","agent_name":"default"}}}
{"level":"ERROR","timestamp":"2025-04-27T12:37:36.005537","namespace":"mcp_agent.llm.providers.augmented_llm_openai.default","message":"Error: Error code: 404 - {'timestamp': '2025-04-27T10:37:36.033+00:00', 'path': '/api/v2/azure/openai/chat/completions', 'status': 404, 'error': 'Not Found', 'requestId': '848fc34a-15761'}"}
{"level":"INFO","timestamp":"2025-04-27T12:38:34.788876","namespace":"mcp_agent.fast-agent example","message":"MCPAgent cleanup","data":{"data":{"progress_action":"Finished","target":"fast-agent example","agent_name":"fast-agent example"}}}
{"level":"INFO","timestamp":"2025-04-27T12:41:29.080203","namespace":"mcp_agent.context","message":"Configuring logger with level: info"}
{"level":"INFO","timestamp":"2025-04-27T12:41:29.080791","namespace":"mcp_agent.fast-agent example","message":"MCPAgent initialized","data":{"data":{"progress_action":"Running","target":"fast-agent example","agent_name":"fast-agent example"}}}
{"level":"INFO","timestamp":"2025-04-27T12:41:29.080823","namespace":"mcp_agent.core.direct_factory","message":"Loaded default","data":{"data":{"progress_action":"Loaded","agent_name":"default"}}}
{"level":"ERROR","timestamp":"2025-04-27T12:41:31.441433","namespace":"mcp_agent.llm.providers.augmented_llm_openai.default","message":"Error: Error code: 404 - {'timestamp': '2025-04-27T10:41:31.466+00:00', 'path': '/api/v2/azure/openai/chat/completions', 'status': 404, 'error': 'Not Found', 'requestId': '848fc34a-15769'}"}
{"level":"INFO","timestamp":"2025-04-27T13:02:03.841182","namespace":"mcp_agent.fast-agent example","message":"MCPAgent cleanup","data":{"data":{"progress_action":"Finished","target":"fast-agent example","agent_name":"fast-agent example"}}}
@@ -0,0 +1,6 @@
def main():
    print("Hello from fast-agent!")


if __name__ == "__main__":
    main()
@@ -0,0 +1,9 @@
[project]
name = "fast-agent"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
    "fast-agent-mcp>=0.2.18",
]
File diff suppressed because it is too large