Compare commits

...

No commits in common. "main" and "76e5e090427b06e78101f0ab8513fba6b2b21c33" have entirely different histories.

10 changed files with 522 additions and 3802 deletions

2
.env.example Normal file

@@ -0,0 +1,2 @@
OPENAI_API_KEY = ''
OAI_CONFIG_LIST = '[]'
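`OAI_CONFIG_LIST` is the value read by `autogen.config_list_from_json` in the new `aitist/app.py`; it is expected to hold a JSON list of model configurations. A minimal sketch of loading it with `python-dotenv`, where the commented example entry (model name and `sk-...` key) is a placeholder rather than anything from this repo:

```python
# Sketch: load OAI_CONFIG_LIST from .env and check it parses as a JSON list.
import json
import os

from dotenv import load_dotenv

load_dotenv()
config_list = json.loads(os.getenv("OAI_CONFIG_LIST", "[]"))
# A populated value would look something like:
# OAI_CONFIG_LIST='[{"model": "gpt-3.5-turbo", "api_key": "sk-..."}]'
assert isinstance(config_list, list)
```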

7
.gitignore vendored

@@ -1,2 +1,5 @@
.aider*
.idea
.cache
.coding
.env
__pycache__

0
aitist/__init__.py Normal file

46
aitist/app.py Executable file

@@ -0,0 +1,46 @@
#!/usr/bin/env python
"""Sample code slightly modified from https://github.com/microsoft/autogen/blob/main/notebook/agentchat_auto_feedback_from_code_execution.ipynb"""
from os import getenv
import autogen
from dotenv import load_dotenv

# Load local configuration from .env
load_dotenv()

config_list = autogen.config_list_from_json(
    "OAI_CONFIG_LIST",
    filter_dict={
        "model": ["gpt-3.5-turbo"]
    },
)

# create an AssistantAgent named "assistant"
assistant = autogen.AssistantAgent(
    name="assistant",
    llm_config={
        "cache_seed": 42,  # seed for caching and reproducibility
        "config_list": config_list,  # a list of OpenAI API configurations
        "temperature": 0,  # temperature for sampling
    },  # configuration for autogen's enhanced inference API which is compatible with OpenAI API
)

# create a UserProxyAgent instance named "user_proxy"
user_proxy = autogen.UserProxyAgent(
    name="user_proxy",
    human_input_mode="NEVER",
    max_consecutive_auto_reply=10,
    is_termination_msg=lambda x: x.get("content", "").rstrip().endswith("TERMINATE"),
    code_execution_config={
        "work_dir": ".coding",
        "use_docker": False,  # set to True or image name like "python:3" to use docker
    },
)

if __name__ == "__main__":
    # the assistant receives a message from the user_proxy, which contains the task description
    user_proxy.initiate_chat(
        assistant,
        message="""What date is today? Compare the year-to-date gain for META and TESLA.""",
    )
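`config_list_from_json` filters the entries from `OAI_CONFIG_LIST` with `filter_dict`, so only `gpt-3.5-turbo` configurations are kept. A hedged sketch of an equivalent setup that builds the list in code from `OPENAI_API_KEY` instead; the model name here is an assumption that simply mirrors the filter above:

```python
# Sketch only: build the config list directly instead of reading OAI_CONFIG_LIST.
# Assumes OPENAI_API_KEY is set in .env; the model name mirrors filter_dict above.
import os

from dotenv import load_dotenv

load_dotenv()
config_list = [
    {"model": "gpt-3.5-turbo", "api_key": os.environ["OPENAI_API_KEY"]},
]
```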

34
app.py

@@ -1,34 +0,0 @@
from fastapi import FastAPI, File, UploadFile
import tempfile
import os
from fastapi import FastAPI
from fastapi.openapi.docs import get_swagger_ui_html

# Assuming Celery is already set up and imported correctly in the project.
from tasks import delete_temp_file  # Import your Celery task here.

app = FastAPI(
    title="AITist API",
    description="This is a simple API for AI Tist.",
    version="1.0.0",
    docs_url="/docs",  # Enable Swagger UI
)


@app.get("/docs")
async def custom_swagger_ui_html():
    return get_swagger_ui_html(openapi_url=app.openapi_url, title=app.title + " - Swagger UI")


@app.post("/uploadfile/")
async def upload_file(file: UploadFile = File(...)):
    contents = await file.read()
    # Save the file to a temporary directory
    temp_dir = tempfile.gettempdir()
    temp_file_path = os.path.join(temp_dir, file.filename)
    with open(temp_file_path, 'wb') as f:
        f.write(contents)
    # Call the Celery task that deletes the file after processing.
    delete_temp_file.delay(temp_file_path)  # Assuming this is your Celery task name.
    return {"filename": file.filename}
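The removed endpoint accepted a multipart upload, wrote it to the system temp directory, and handed the path to the `delete_temp_file` Celery task for later cleanup. A rough sketch of exercising it with FastAPI's test client, assuming the old `app.py` were still present and a Celery broker were configured; the filename and payload are made up:

```python
# Sketch: exercise the removed /uploadfile/ endpoint with FastAPI's TestClient.
# Assumes the old app.py still exists and delete_temp_file.delay() can reach a broker.
from fastapi.testclient import TestClient

from app import app  # the module removed in this commit

client = TestClient(app)
response = client.post("/uploadfile/", files={"file": ("demo.txt", b"hello")})
assert response.json() == {"filename": "demo.txt"}
```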

27
flow.md

@ -1,27 +0,0 @@
# The AItist Listening Flow
(implementation outline: https://github.com/lablab-ai/Whisper-transcription_and_diarization-speaker-identification-/blob/main/transcribtion_diarization.ipynb
```mermaid
flowchart TD
subgraph "Context Input"
record[\VAD/] --> transcribe
record --> diarize
diarize --> identify_speakers
identify_speakers --> regard{{Do speakers matter}}
transcribe --> regard
regard --yes--> parse_context
regard --"no"--> stop[/Stop\]
regard --some--> log_speakers
log_speakers --> parse_context
parse_context --> known{{Context known}}
known --"no"--> log_context[Log for async analysis]
end
subgraph Process
known --yes--> apply_context_prompt
apply_context_prompt --> llm_find_action["Find action from text"]
end
```
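The removed flowchart describes the listening pipeline: voice activity detection feeds both transcription and diarization, the identified speakers and the transcript decide whether processing continues, and a known context is turned into a prompt for the action-finding LLM step. A rough Python skeleton of that control flow, where every helper name is hypothetical:

```python
# Rough skeleton of the removed flow.md pipeline; every helper here is hypothetical.
def handle_recording(audio_path):
    segments = detect_voice_activity(audio_path)      # VAD
    text = transcribe(segments)
    speakers = identify_speakers(diarize(segments))

    decision = do_speakers_matter(speakers)           # "yes", "no", or "some"
    if decision == "no":
        return None                                   # Stop
    if decision == "some":
        log_speakers(speakers)

    context = parse_context(text, speakers)
    if not is_context_known(context):
        log_context_for_async_analysis(context)
        return None

    prompt = apply_context_prompt(context)
    return llm_find_action(prompt)                    # "Find action from text"
```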


@@ -1,2 +0,0 @@
# This is a simple Python script that prints 'Hey there!'
print("Hey there!")

4157
poetry.lock generated

File diff suppressed because it is too large.

pyproject.toml

@@ -1,18 +1,16 @@
 [tool.poetry]
 name = "aitist"
 version = "0.1.0"
-description = "A new Python project managed by Poetry."
-authors = ["Timothy Farrell <tim@thecookiejar.me>"]
+package-mode = false
+description = ""
+authors = ["Your Name <you@example.com>"]
 readme = "README.md"

 [tool.poetry.dependencies]
-python = "^3.11"
-fastapi = "^0.111.1"
-uvicorn = {extras = ["standard"], version = "^0.17.6"}
-celery = "^5.4.0"
-pydantic = "^2.8.2"
-pyannote-audio = "^3.3.1"
+python = ">=3.9,<3.12"
+pyautogen = "^0.2.2"
+python-dotenv = "^1.0.0"

 [build-system]
-requires = ["poetry-core>=1.0.0"]
+requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"

tasks.py

@@ -1,31 +0,0 @@
from celery import shared_task


@shared_task
def delete_temp_file(file_path):
    """Celery task to delete a temporary file."""
    # Implementation goes here.
    pass


@shared_task
def diarize(audio_path):
    """
    Celery task that performs diarization on an audio file.
    Placeholder for actual implementation.
    """
    pass


@shared_task
def transcribe(audio_path):
    """
    Celery task that transcribes speech from an audio file to text.
    Placeholder for actual implementation.
    """
    pass


@shared_task
def identify(image_path):
    """
    Celery task that identifies objects or features in an image.
    Placeholder for actual implementation.
    """
    pass
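These placeholders use `shared_task`, which only runs once some Celery application is configured and imports this module. A minimal sketch of such an app; the broker URL and app name are assumptions, since the repo does not show this wiring:

```python
# Sketch only: a Celery app that the shared_task functions above could bind to.
# Broker URL and app name are assumptions; the repository does not define them here.
from celery import Celery

celery_app = Celery("aitist", broker="redis://localhost:6379/0")
celery_app.conf.imports = ("tasks",)  # have workers import tasks.py so the tasks register
```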