Merge branch 'dev' into ntindle/open-2032-re-enable-getredditpostblock-sendemailblock

Nicholas Tindle 2025-01-08 16:46:34 -06:00 committed by GitHub
commit 75634e6155
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
111 changed files with 13663 additions and 6682 deletions

View File

@ -89,28 +89,6 @@ updates:
- "minor"
- "patch"
# market (Poetry project)
- package-ecosystem: "pip"
directory: "autogpt_platform/market"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: "dev"
commit-message:
prefix: "chore(market/deps)"
prefix-development: "chore(market/deps-dev)"
groups:
production-dependencies:
dependency-type: "production"
update-types:
- "minor"
- "patch"
development-dependencies:
dependency-type: "development"
update-types:
- "minor"
- "patch"
# GitHub Actions
- package-ecosystem: "github-actions"

View File

@ -35,12 +35,6 @@ jobs:
env:
DATABASE_URL: ${{ secrets.BACKEND_DATABASE_URL }}
- name: Run Market Migrations
working-directory: ./autogpt_platform/market
run: |
python -m prisma migrate deploy
env:
DATABASE_URL: ${{ secrets.MARKET_DATABASE_URL }}
trigger:
needs: migrate

View File

@ -37,13 +37,6 @@ jobs:
env:
DATABASE_URL: ${{ secrets.BACKEND_DATABASE_URL }}
- name: Run Market Migrations
working-directory: ./autogpt_platform/market
run: |
python -m prisma migrate deploy
env:
DATABASE_URL: ${{ secrets.MARKET_DATABASE_URL }}
trigger:
needs: migrate
runs-on: ubuntu-latest

View File

@ -1,126 +0,0 @@
name: AutoGPT Platform - Backend CI
on:
push:
branches: [master, dev, ci-test*]
paths:
- ".github/workflows/platform-market-ci.yml"
- "autogpt_platform/market/**"
pull_request:
branches: [master, dev, release-*]
paths:
- ".github/workflows/platform-market-ci.yml"
- "autogpt_platform/market/**"
merge_group:
concurrency:
group: ${{ format('backend-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
cancel-in-progress: ${{ startsWith(github.event_name, 'pull_request') }}
defaults:
run:
shell: bash
working-directory: autogpt_platform/market
jobs:
test:
permissions:
contents: read
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
python-version: ["3.10"]
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
submodules: true
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Supabase
uses: supabase/setup-cli@v1
with:
version: latest
- id: get_date
name: Get date
run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT
- name: Set up Python dependency cache
uses: actions/cache@v4
with:
path: ~/.cache/pypoetry
key: poetry-${{ runner.os }}-${{ hashFiles('autogpt_platform/market/poetry.lock') }}
- name: Install Poetry (Unix)
run: |
curl -sSL https://install.python-poetry.org | python3 -
if [ "${{ runner.os }}" = "macOS" ]; then
PATH="$HOME/.local/bin:$PATH"
echo "$HOME/.local/bin" >> $GITHUB_PATH
fi
- name: Install Python dependencies
run: poetry install
- name: Generate Prisma Client
run: poetry run prisma generate
- id: supabase
name: Start Supabase
working-directory: .
run: |
supabase init
supabase start --exclude postgres-meta,realtime,storage-api,imgproxy,inbucket,studio,edge-runtime,logflare,vector,supavisor
supabase status -o env | sed 's/="/=/; s/"$//' >> $GITHUB_OUTPUT
# outputs:
# DB_URL, API_URL, GRAPHQL_URL, ANON_KEY, SERVICE_ROLE_KEY, JWT_SECRET
- name: Run Database Migrations
run: poetry run prisma migrate dev --name updates
env:
DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
- id: lint
name: Run Linter
run: poetry run lint
# Tests commented out because they do not work with the prisma mock, nor have they been updated since they were created
# - name: Run pytest with coverage
# run: |
# if [[ "${{ runner.debug }}" == "1" ]]; then
# poetry run pytest -s -vv -o log_cli=true -o log_cli_level=DEBUG test
# else
# poetry run pytest -s -vv test
# fi
# if: success() || (failure() && steps.lint.outcome == 'failure')
# env:
# LOG_LEVEL: ${{ runner.debug && 'DEBUG' || 'INFO' }}
# DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
# SUPABASE_URL: ${{ steps.supabase.outputs.API_URL }}
# SUPABASE_SERVICE_ROLE_KEY: ${{ steps.supabase.outputs.SERVICE_ROLE_KEY }}
# SUPABASE_JWT_SECRET: ${{ steps.supabase.outputs.JWT_SECRET }}
# REDIS_HOST: 'localhost'
# REDIS_PORT: '6379'
# REDIS_PASSWORD: 'testpassword'
env:
CI: true
PLAIN_OUTPUT: True
RUN_ENV: local
PORT: 8080
# - name: Upload coverage reports to Codecov
# uses: codecov/codecov-action@v4
# with:
# token: ${{ secrets.CODECOV_TOKEN }}
# flags: backend,${{ runner.os }}

View File

@ -0,0 +1,76 @@
from typing import Annotated, Any, Literal, Optional, TypedDict
from uuid import uuid4
from pydantic import BaseModel, Field, SecretStr, field_serializer
class _BaseCredentials(BaseModel):
id: str = Field(default_factory=lambda: str(uuid4()))
provider: str
title: Optional[str]
@field_serializer("*")
def dump_secret_strings(value: Any, _info):
if isinstance(value, SecretStr):
return value.get_secret_value()
return value
class OAuth2Credentials(_BaseCredentials):
type: Literal["oauth2"] = "oauth2"
username: Optional[str]
"""Username of the third-party service user that these credentials belong to"""
access_token: SecretStr
access_token_expires_at: Optional[int]
"""Unix timestamp (seconds) indicating when the access token expires (if at all)"""
refresh_token: Optional[SecretStr]
refresh_token_expires_at: Optional[int]
"""Unix timestamp (seconds) indicating when the refresh token expires (if at all)"""
scopes: list[str]
metadata: dict[str, Any] = Field(default_factory=dict)
def bearer(self) -> str:
return f"Bearer {self.access_token.get_secret_value()}"
class APIKeyCredentials(_BaseCredentials):
type: Literal["api_key"] = "api_key"
api_key: SecretStr
expires_at: Optional[int]
"""Unix timestamp (seconds) indicating when the API key expires (if at all)"""
def bearer(self) -> str:
return f"Bearer {self.api_key.get_secret_value()}"
Credentials = Annotated[
OAuth2Credentials | APIKeyCredentials,
Field(discriminator="type"),
]
CredentialsType = Literal["api_key", "oauth2"]
class OAuthState(BaseModel):
token: str
provider: str
expires_at: int
"""Unix timestamp (seconds) indicating when this OAuth state expires"""
code_verifier: Optional[str] = None
scopes: list[str]
class UserMetadata(BaseModel):
integration_credentials: list[Credentials] = Field(default_factory=list)
integration_oauth_states: list[OAuthState] = Field(default_factory=list)
class UserMetadataRaw(TypedDict, total=False):
integration_credentials: list[dict]
integration_oauth_states: list[dict]
class UserIntegrations(BaseModel):
credentials: list[Credentials] = Field(default_factory=list)
oauth_states: list[OAuthState] = Field(default_factory=list)
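# (Illustrative usage sketch, not part of this diff.) It shows how the
# field_serializer above unwraps SecretStr values on model_dump(), and how the
# Credentials discriminated union resolves a concrete model from the "type" key.
from pydantic import TypeAdapter
creds = APIKeyCredentials(provider="nvidia", api_key=SecretStr("mock-key"), title="Example key", expires_at=None)
assert creds.model_dump()["api_key"] == "mock-key"  # secret value is exposed on dump
assert creds.bearer() == "Bearer mock-key"
# Round-tripping through the union: "type": "api_key" selects APIKeyCredentials.
restored = TypeAdapter(Credentials).validate_python(creds.model_dump())
assert isinstance(restored, APIKeyCredentials)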

View File

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.0.0 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
[[package]]
name = "aiohappyeyeballs"
@ -6,7 +6,6 @@ version = "2.4.0"
description = "Happy Eyeballs for asyncio"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"},
{file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"},
@ -18,7 +17,6 @@ version = "3.10.5"
description = "Async http client/server framework (asyncio)"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3"},
{file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6"},
@ -131,7 +129,6 @@ version = "1.3.1"
description = "aiosignal: a list of registered asynchronous callbacks"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
{file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
@ -146,7 +143,6 @@ version = "0.7.0"
description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
@ -158,7 +154,6 @@ version = "4.4.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
{file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
@ -181,12 +176,10 @@ version = "4.0.3"
description = "Timeout context manager for asyncio programs"
optional = false
python-versions = ">=3.7"
groups = ["main", "dev"]
files = [
{file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
{file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
]
markers = {main = "python_version < \"3.11\"", dev = "python_full_version < \"3.11.3\""}
[[package]]
name = "attrs"
@ -194,7 +187,6 @@ version = "24.2.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
{file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
@ -214,7 +206,6 @@ version = "5.5.0"
description = "Extensible memoizing collections and decorators"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"},
{file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"},
@ -226,7 +217,6 @@ version = "2024.8.30"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
{file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
@ -238,7 +228,6 @@ version = "3.3.2"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
groups = ["main"]
files = [
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
@ -338,7 +327,6 @@ version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["main"]
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
@ -350,7 +338,6 @@ version = "1.2.14"
description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
groups = ["main"]
files = [
{file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
{file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
@ -368,7 +355,6 @@ version = "2.1.0"
description = "A library to handle automated deprecations"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"},
{file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"},
@ -383,8 +369,6 @@ version = "1.2.2"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
groups = ["main"]
markers = "python_version < \"3.11\""
files = [
{file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
{file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
@ -399,7 +383,6 @@ version = "1.2.2"
description = "Dictionary with auto-expiring values for caching purposes"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "expiringdict-1.2.2-py3-none-any.whl", hash = "sha256:09a5d20bc361163e6432a874edd3179676e935eb81b925eccef48d409a8a45e8"},
{file = "expiringdict-1.2.2.tar.gz", hash = "sha256:300fb92a7e98f15b05cf9a856c1415b3bc4f2e132be07daa326da6414c23ee09"},
@ -414,7 +397,6 @@ version = "1.4.1"
description = "A list-like structure which implements collections.abc.MutableSequence"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"},
{file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"},
@ -501,7 +483,6 @@ version = "2.19.2"
description = "Google API client core library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_api_core-2.19.2-py3-none-any.whl", hash = "sha256:53ec0258f2837dd53bbd3d3df50f5359281b3cc13f800c941dd15a9b5a415af4"},
{file = "google_api_core-2.19.2.tar.gz", hash = "sha256:ca07de7e8aa1c98a8bfca9321890ad2340ef7f2eb136e558cee68f24b94b0a8f"},
@ -533,7 +514,6 @@ version = "2.34.0"
description = "Google Authentication Library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"},
{file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"},
@ -557,7 +537,6 @@ version = "1.4.5"
description = "Google Cloud Appengine Logging API client library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_cloud_appengine_logging-1.4.5-py2.py3-none-any.whl", hash = "sha256:344e0244404049b42164e4d6dc718ca2c81b393d066956e7cb85fd9407ed9c48"},
{file = "google_cloud_appengine_logging-1.4.5.tar.gz", hash = "sha256:de7d766e5d67b19fc5833974b505b32d2a5bbdfb283fd941e320e7cfdae4cb83"},
@ -575,7 +554,6 @@ version = "0.3.0"
description = "Google Cloud Audit Protos"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_cloud_audit_log-0.3.0-py2.py3-none-any.whl", hash = "sha256:8340793120a1d5aa143605def8704ecdcead15106f754ef1381ae3bab533722f"},
{file = "google_cloud_audit_log-0.3.0.tar.gz", hash = "sha256:901428b257020d8c1d1133e0fa004164a555e5a395c7ca3cdbb8486513df3a65"},
@ -591,7 +569,6 @@ version = "2.4.1"
description = "Google Cloud API client core library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"},
{file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"},
@ -610,7 +587,6 @@ version = "3.11.3"
description = "Stackdriver Logging API client library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_cloud_logging-3.11.3-py2.py3-none-any.whl", hash = "sha256:b8ec23f2998f76a58f8492db26a0f4151dd500425c3f08448586b85972f3c494"},
{file = "google_cloud_logging-3.11.3.tar.gz", hash = "sha256:0a73cd94118875387d4535371d9e9426861edef8e44fba1261e86782d5b8d54f"},
@ -636,7 +612,6 @@ version = "1.65.0"
description = "Common protobufs used in Google APIs"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"},
{file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"},
@ -655,7 +630,6 @@ version = "2.10.0"
description = "Python Client Library for Supabase Auth"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "gotrue-2.10.0-py3-none-any.whl", hash = "sha256:768e58207488e5184ffbdc4351b7280d913daf97962f4e9f2cca05c80004b042"},
{file = "gotrue-2.10.0.tar.gz", hash = "sha256:4edf4c251da3535f2b044e23deba221e848ca1210c17d0c7a9b19f79a1e3f3c0"},
@ -671,7 +645,6 @@ version = "0.13.1"
description = "IAM API client library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"},
{file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"},
@ -688,7 +661,6 @@ version = "1.66.1"
description = "HTTP/2-based RPC framework"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "grpcio-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492"},
{file = "grpcio-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac"},
@ -747,7 +719,6 @@ version = "1.66.1"
description = "Status proto mapping for gRPC"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "grpcio_status-1.66.1-py3-none-any.whl", hash = "sha256:cf9ed0b4a83adbe9297211c95cb5488b0cd065707e812145b842c85c4782ff02"},
{file = "grpcio_status-1.66.1.tar.gz", hash = "sha256:b3f7d34ccc46d83fea5261eea3786174459f763c31f6e34f1d24eba6d515d024"},
@ -764,7 +735,6 @@ version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
@ -776,7 +746,6 @@ version = "4.1.0"
description = "HTTP/2 State-Machine based protocol implementation"
optional = false
python-versions = ">=3.6.1"
groups = ["main"]
files = [
{file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"},
{file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"},
@ -792,7 +761,6 @@ version = "4.0.0"
description = "Pure-Python HPACK header compression"
optional = false
python-versions = ">=3.6.1"
groups = ["main"]
files = [
{file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"},
{file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"},
@ -804,7 +772,6 @@ version = "1.0.5"
description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"},
{file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"},
@ -826,7 +793,6 @@ version = "0.27.2"
description = "The next generation HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"},
{file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"},
@ -853,7 +819,6 @@ version = "6.0.1"
description = "HTTP/2 framing layer for Python"
optional = false
python-versions = ">=3.6.1"
groups = ["main"]
files = [
{file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"},
{file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"},
@ -865,7 +830,6 @@ version = "3.8"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"},
{file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"},
@ -877,7 +841,6 @@ version = "8.4.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"},
{file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"},
@ -897,7 +860,6 @@ version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
@ -909,7 +871,6 @@ version = "6.1.0"
description = "multidict implementation"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"},
{file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"},
@ -1014,7 +975,6 @@ version = "1.27.0"
description = "OpenTelemetry Python API"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"},
{file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"},
@ -1030,7 +990,6 @@ version = "24.1"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
@ -1042,7 +1001,6 @@ version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
@ -1058,7 +1016,6 @@ version = "0.18.0"
description = "PostgREST client for Python. This library provides an ORM interface to PostgREST."
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "postgrest-0.18.0-py3-none-any.whl", hash = "sha256:200baad0d23fee986b3a0ffd3e07bfe0cdd40e09760f11e8e13a6c0c2376d5fa"},
{file = "postgrest-0.18.0.tar.gz", hash = "sha256:29c1a94801a17eb9ad590189993fe5a7a6d8c1bfc11a3c9d0ce7ba146454ebb3"},
@ -1076,7 +1033,6 @@ version = "1.24.0"
description = "Beautiful, Pythonic protocol buffers."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"},
{file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"},
@ -1094,7 +1050,6 @@ version = "5.28.0"
description = ""
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "protobuf-5.28.0-cp310-abi3-win32.whl", hash = "sha256:66c3edeedb774a3508ae70d87b3a19786445fe9a068dd3585e0cefa8a77b83d0"},
{file = "protobuf-5.28.0-cp310-abi3-win_amd64.whl", hash = "sha256:6d7cc9e60f976cf3e873acb9a40fed04afb5d224608ed5c1a105db4a3f09c5b6"},
@ -1115,7 +1070,6 @@ version = "0.6.1"
description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"},
{file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"},
@ -1127,7 +1081,6 @@ version = "0.4.1"
description = "A collection of ASN.1-based protocols modules"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"},
{file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"},
@ -1142,7 +1095,6 @@ version = "2.10.3"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"},
{file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"},
@ -1163,7 +1115,6 @@ version = "2.27.1"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"},
{file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"},
@ -1276,7 +1227,6 @@ version = "2.7.0"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pydantic_settings-2.7.0-py3-none-any.whl", hash = "sha256:e00c05d5fa6cbbb227c84bd7487c5c1065084119b750df7c8c1a554aed236eb5"},
{file = "pydantic_settings-2.7.0.tar.gz", hash = "sha256:ac4bfd4a36831a48dbf8b2d9325425b549a0a6f18cea118436d728eb4f1c4d66"},
@ -1297,7 +1247,6 @@ version = "2.10.1"
description = "JSON Web Token implementation in Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"},
{file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"},
@ -1315,7 +1264,6 @@ version = "8.3.3"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
{file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
@ -1338,7 +1286,6 @@ version = "0.25.0"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pytest_asyncio-0.25.0-py3-none-any.whl", hash = "sha256:db5432d18eac6b7e28b46dcd9b69921b55c3b1086e85febfe04e70b18d9e81b3"},
{file = "pytest_asyncio-0.25.0.tar.gz", hash = "sha256:8c0610303c9e0442a5db8604505fc0f545456ba1528824842b37b4a626cbf609"},
@ -1357,7 +1304,6 @@ version = "3.14.0"
description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
{file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
@ -1375,7 +1321,6 @@ version = "2.9.0.post0"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
files = [
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
@ -1390,7 +1335,6 @@ version = "1.0.1"
description = "Read key-value pairs from a .env file and set them as environment variables"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
@ -1405,7 +1349,6 @@ version = "2.0.2"
description = ""
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "realtime-2.0.2-py3-none-any.whl", hash = "sha256:2634c915bc38807f2013f21e8bcc4d2f79870dfd81460ddb9393883d0489928a"},
{file = "realtime-2.0.2.tar.gz", hash = "sha256:519da9325b3b8102139d51785013d592f6b2403d81fa21d838a0b0234723ed7d"},
@ -1423,7 +1366,6 @@ version = "5.2.1"
description = "Python client for Redis database and key-value store"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"},
{file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"},
@ -1442,7 +1384,6 @@ version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
@ -1464,7 +1405,6 @@ version = "4.9"
description = "Pure-Python RSA implementation"
optional = false
python-versions = ">=3.6,<4"
groups = ["main"]
files = [
{file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
{file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
@ -1475,30 +1415,29 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
version = "0.8.3"
version = "0.8.6"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "ruff-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:8d5d273ffffff0acd3db5bf626d4b131aa5a5ada1276126231c4174543ce20d6"},
{file = "ruff-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e4d66a21de39f15c9757d00c50c8cdd20ac84f55684ca56def7891a025d7e939"},
{file = "ruff-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c356e770811858bd20832af696ff6c7e884701115094f427b64b25093d6d932d"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c0a60a825e3e177116c84009d5ebaa90cf40dfab56e1358d1df4e29a9a14b13"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fb782f4db39501210ac093c79c3de581d306624575eddd7e4e13747e61ba18"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f26bc76a133ecb09a38b7868737eded6941b70a6d34ef53a4027e83913b6502"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:01b14b2f72a37390c1b13477c1c02d53184f728be2f3ffc3ace5b44e9e87b90d"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53babd6e63e31f4e96ec95ea0d962298f9f0d9cc5990a1bbb023a6baf2503a82"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ae441ce4cf925b7f363d33cd6570c51435972d697e3e58928973994e56e1452"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7c65bc0cadce32255e93c57d57ecc2cca23149edd52714c0c5d6fa11ec328cd"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5be450bb18f23f0edc5a4e5585c17a56ba88920d598f04a06bd9fd76d324cb20"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8faeae3827eaa77f5721f09b9472a18c749139c891dbc17f45e72d8f2ca1f8fc"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:db503486e1cf074b9808403991663e4277f5c664d3fe237ee0d994d1305bb060"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6567be9fb62fbd7a099209257fef4ad2c3153b60579818b31a23c886ed4147ea"},
{file = "ruff-0.8.3-py3-none-win32.whl", hash = "sha256:19048f2f878f3ee4583fc6cb23fb636e48c2635e30fb2022b3a1cd293402f964"},
{file = "ruff-0.8.3-py3-none-win_amd64.whl", hash = "sha256:f7df94f57d7418fa7c3ffb650757e0c2b96cf2501a0b192c18e4fb5571dfada9"},
{file = "ruff-0.8.3-py3-none-win_arm64.whl", hash = "sha256:fe2756edf68ea79707c8d68b78ca9a58ed9af22e430430491ee03e718b5e4936"},
{file = "ruff-0.8.3.tar.gz", hash = "sha256:5e7558304353b84279042fc584a4f4cb8a07ae79b2bf3da1a7551d960b5626d3"},
{file = "ruff-0.8.6-py3-none-linux_armv6l.whl", hash = "sha256:defed167955d42c68b407e8f2e6f56ba52520e790aba4ca707a9c88619e580e3"},
{file = "ruff-0.8.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:54799ca3d67ae5e0b7a7ac234baa657a9c1784b48ec954a094da7c206e0365b1"},
{file = "ruff-0.8.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e88b8f6d901477c41559ba540beeb5a671e14cd29ebd5683903572f4b40a9807"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0509e8da430228236a18a677fcdb0c1f102dd26d5520f71f79b094963322ed25"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a7ddb221779871cf226100e677b5ea38c2d54e9e2c8ed847450ebbdf99b32d"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:248b1fb3f739d01d528cc50b35ee9c4812aa58cc5935998e776bf8ed5b251e75"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bc3c083c50390cf69e7e1b5a5a7303898966be973664ec0c4a4acea82c1d4315"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52d587092ab8df308635762386f45f4638badb0866355b2b86760f6d3c076188"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61323159cf21bc3897674e5adb27cd9e7700bab6b84de40d7be28c3d46dc67cf"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae4478b1471fc0c44ed52a6fb787e641a2ac58b1c1f91763bafbc2faddc5117"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0c000a471d519b3e6cfc9c6680025d923b4ca140ce3e4612d1a2ef58e11f11fe"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9257aa841e9e8d9b727423086f0fa9a86b6b420fbf4bf9e1465d1250ce8e4d8d"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45a56f61b24682f6f6709636949ae8cc82ae229d8d773b4c76c09ec83964a95a"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:496dd38a53aa173481a7d8866bcd6451bd934d06976a2505028a50583e001b76"},
{file = "ruff-0.8.6-py3-none-win32.whl", hash = "sha256:e169ea1b9eae61c99b257dc83b9ee6c76f89042752cb2d83486a7d6e48e8f764"},
{file = "ruff-0.8.6-py3-none-win_amd64.whl", hash = "sha256:f1d70bef3d16fdc897ee290d7d20da3cbe4e26349f62e8a0274e7a3f4ce7a905"},
{file = "ruff-0.8.6-py3-none-win_arm64.whl", hash = "sha256:7d7fc2377a04b6e04ffe588caad613d0c460eb2ecba4c0ccbbfe2bc973cbc162"},
{file = "ruff-0.8.6.tar.gz", hash = "sha256:dcad24b81b62650b0eb8814f576fc65cfee8674772a6e24c9b747911801eeaa5"},
]
[[package]]
@ -1507,7 +1446,6 @@ version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
groups = ["main"]
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
@ -1519,7 +1457,6 @@ version = "1.3.1"
description = "Sniff out which async library your code is running under"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
@ -1531,7 +1468,6 @@ version = "0.9.0"
description = "Supabase Storage client for Python."
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "storage3-0.9.0-py3-none-any.whl", hash = "sha256:8b2fb91f0c61583a2f4eac74a8bae67e00d41ff38095c8a6cd3f2ce5e0ab76e7"},
{file = "storage3-0.9.0.tar.gz", hash = "sha256:e16697f60894c94e1d9df0d2e4af783c1b3f7dd08c9013d61978825c624188c4"},
@ -1547,8 +1483,6 @@ version = "0.4.15"
description = "An Enum that inherits from str."
optional = false
python-versions = "*"
groups = ["main"]
markers = "python_version < \"3.11\""
files = [
{file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"},
{file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"},
@ -1565,7 +1499,6 @@ version = "2.10.0"
description = "Supabase client for Python."
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "supabase-2.10.0-py3-none-any.whl", hash = "sha256:183fb23c04528593f8f81c24ceb8178f3a56bff40fec7ed873b6c55ebc2e420a"},
{file = "supabase-2.10.0.tar.gz", hash = "sha256:9ac095f8947bf60780e67c0edcbab53e2db3f6f3f022329397b093500bf2607c"},
@ -1585,7 +1518,6 @@ version = "0.7.0"
description = "Library for Supabase Functions"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "supafunc-0.7.0-py3-none-any.whl", hash = "sha256:4160260dc02bdd906be1e2ffd7cb3ae8b74ae437c892bb475352b6a99d9ff8eb"},
{file = "supafunc-0.7.0.tar.gz", hash = "sha256:5b1c415fba1395740b2b4eedd1d786384bd58b98f6333a11ba7889820a48b6a7"},
@ -1600,8 +1532,6 @@ version = "2.1.0"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.8"
groups = ["main"]
markers = "python_version < \"3.11\""
files = [
{file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"},
{file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"},
@ -1613,7 +1543,6 @@ version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
@ -1625,7 +1554,6 @@ version = "2.2.2"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
{file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
@ -1643,7 +1571,6 @@ version = "12.0"
description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"},
{file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"},
@ -1725,7 +1652,6 @@ version = "1.16.0"
description = "Module for decorators, wrappers and monkey patching."
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
{file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
@ -1805,7 +1731,6 @@ version = "1.11.1"
description = "Yet another URL library"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "yarl-1.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:400cd42185f92de559d29eeb529e71d80dfbd2f45c36844914a4a34297ca6f00"},
{file = "yarl-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8258c86f47e080a258993eed877d579c71da7bda26af86ce6c2d2d072c11320d"},
@ -1911,7 +1836,6 @@ version = "3.20.1"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"},
{file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"},
@ -1926,6 +1850,6 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
lock-version = "2.0"
python-versions = ">=3.10,<4.0"
content-hash = "13a36d3be675cab4a3eb2e6a62a1b08df779bded4c7b9164d8be300dc08748d0"
content-hash = "bf1b0125759dadb1369fff05ffba64fea3e82b9b7a43d0068e1c80974a4ebc1c"

View File

@ -21,7 +21,7 @@ supabase = "^2.10.0"
[tool.poetry.group.dev.dependencies]
redis = "^5.2.1"
ruff = "^0.8.3"
ruff = "^0.8.6"
[build-system]
requires = ["poetry-core"]

View File

@ -58,6 +58,21 @@ GITHUB_CLIENT_SECRET=
GOOGLE_CLIENT_ID=
GOOGLE_CLIENT_SECRET=
# Twitter (X) OAuth 2.0 with PKCE Configuration
# 1. Create a Twitter Developer Account:
# - Visit https://developer.x.com/en and sign up
# 2. Set up your application:
# - Navigate to Developer Portal > Projects > Create Project
# - Add a new app to your project
# 3. Configure app settings:
# - App Permissions: Read + Write + Direct Messages
# - App Type: Web App, Automated App or Bot
# - OAuth 2.0 Callback URL: http://localhost:3000/auth/integrations/oauth_callback
# - Save your Client ID and Client Secret below
TWITTER_CLIENT_ID=
TWITTER_CLIENT_SECRET=
## ===== OPTIONAL API KEYS ===== ##
# LLM

View File

@ -56,15 +56,24 @@ class SendWebRequestBlock(Block):
)
def run(self, input_data: Input, **kwargs) -> BlockOutput:
if isinstance(input_data.body, str):
input_data.body = json.loads(input_data.body)
body = input_data.body
if input_data.json_format:
if isinstance(body, str):
try:
# Try to parse as JSON first
body = json.loads(body)
except json.JSONDecodeError:
# If it's not valid JSON but just plain text,
# send it as plain text instead
input_data.json_format = False
response = requests.request(
input_data.method.value,
input_data.url,
headers=input_data.headers,
json=input_data.body if input_data.json_format else None,
data=input_data.body if not input_data.json_format else None,
json=body if input_data.json_format else None,
data=body if not input_data.json_format else None,
)
result = response.json() if input_data.json_format else response.text
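# (Illustrative sketch, not part of this diff.) The new handling only treats a string
# body as JSON when it actually parses; otherwise the request falls back to sending it
# as plain text via `data=`. A standalone illustration of that decision:
import json

def classify_body(body, json_format: bool = True):
    # Mirrors the logic above: try JSON first, fall back to plain text on failure.
    if json_format and isinstance(body, str):
        try:
            body = json.loads(body)
        except json.JSONDecodeError:
            json_format = False
    return ("json" if json_format else "data"), body

assert classify_body('{"a": 1}') == ("json", {"a": 1})
assert classify_body("plain text") == ("data", "plain text")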

View File

@ -0,0 +1,32 @@
from typing import Literal
from pydantic import SecretStr
from backend.data.model import APIKeyCredentials, CredentialsField, CredentialsMetaInput
from backend.integrations.providers import ProviderName
NvidiaCredentials = APIKeyCredentials
NvidiaCredentialsInput = CredentialsMetaInput[
Literal[ProviderName.NVIDIA],
Literal["api_key"],
]
TEST_CREDENTIALS = APIKeyCredentials(
id="01234567-89ab-cdef-0123-456789abcdef",
provider="nvidia",
api_key=SecretStr("mock-nvidia-api-key"),
title="Mock Nvidia API key",
expires_at=None,
)
TEST_CREDENTIALS_INPUT = {
"provider": TEST_CREDENTIALS.provider,
"id": TEST_CREDENTIALS.id,
"type": TEST_CREDENTIALS.type,
"title": TEST_CREDENTIALS.title,
}
def NvidiaCredentialsField() -> NvidiaCredentialsInput:
"""Creates an Nvidia credentials input on a block."""
return CredentialsField(description="The Nvidia integration requires an API Key.")

View File

@ -0,0 +1,90 @@
from backend.blocks.nvidia._auth import (
NvidiaCredentials,
NvidiaCredentialsField,
NvidiaCredentialsInput,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.request import requests
class NvidiaDeepfakeDetectBlock(Block):
class Input(BlockSchema):
credentials: NvidiaCredentialsInput = NvidiaCredentialsField()
image_base64: str = SchemaField(
description="Image to analyze for deepfakes", image_upload=True
)
return_image: bool = SchemaField(
description="Whether to return the processed image with markings",
default=False,
)
class Output(BlockSchema):
status: str = SchemaField(
description="Detection status (SUCCESS, ERROR, CONTENT_FILTERED)",
default="",
)
image: str = SchemaField(
description="Processed image with detection markings (if return_image=True)",
default="",
image_output=True,
)
is_deepfake: float = SchemaField(
description="Probability that the image is a deepfake (0-1)",
default=0.0,
)
def __init__(self):
super().__init__(
id="8c7d0d67-e79c-44f6-92a1-c2600c8aac7f",
description="Detects potential deepfakes in images using Nvidia's AI API",
categories={BlockCategory.SAFETY},
input_schema=NvidiaDeepfakeDetectBlock.Input,
output_schema=NvidiaDeepfakeDetectBlock.Output,
)
def run(
self, input_data: Input, *, credentials: NvidiaCredentials, **kwargs
) -> BlockOutput:
url = "https://ai.api.nvidia.com/v1/cv/hive/deepfake-image-detection"
headers = {
"accept": "application/json",
"content-type": "application/json",
"Authorization": f"Bearer {credentials.api_key.get_secret_value()}",
}
image_data = f"data:image/jpeg;base64,{input_data.image_base64}"
payload = {
"input": [image_data],
"return_image": input_data.return_image,
}
try:
response = requests.post(url, headers=headers, json=payload)
response.raise_for_status()
data = response.json()
result = data.get("data", [{}])[0]
# Get deepfake probability from first bounding box if any
deepfake_prob = 0.0
if result.get("bounding_boxes"):
deepfake_prob = result["bounding_boxes"][0].get("is_deepfake", 0.0)
yield "status", result.get("status", "ERROR")
yield "is_deepfake", deepfake_prob
if input_data.return_image:
image_data = result.get("image", "")
output_data = f"data:image/jpeg;base64,{image_data}"
yield "image", output_data
else:
yield "image", ""
except Exception as e:
yield "error", str(e)
yield "status", "ERROR"
yield "is_deepfake", 0.0
yield "image", ""

View File

@ -0,0 +1,60 @@
from typing import Literal
from pydantic import SecretStr
from backend.data.model import (
CredentialsField,
CredentialsMetaInput,
OAuth2Credentials,
ProviderName,
)
from backend.integrations.oauth.twitter import TwitterOAuthHandler
from backend.util.settings import Secrets
# --8<-- [start:TwitterOAuthIsConfigured]
secrets = Secrets()
TWITTER_OAUTH_IS_CONFIGURED = bool(
secrets.twitter_client_id and secrets.twitter_client_secret
)
# --8<-- [end:TwitterOAuthIsConfigured]
TwitterCredentials = OAuth2Credentials
TwitterCredentialsInput = CredentialsMetaInput[
Literal[ProviderName.TWITTER], Literal["oauth2"]
]
# Currently, we request all permissions from the Twitter API up front.
# In the future, if we need incremental permissions, we can use these requested_scopes.
def TwitterCredentialsField(scopes: list[str]) -> TwitterCredentialsInput:
"""
Creates a Twitter credentials input on a block.
Params:
scopes: The authorization scopes needed for the block to work.
"""
return CredentialsField(
# required_scopes=set(scopes),
required_scopes=set(TwitterOAuthHandler.DEFAULT_SCOPES + scopes),
description="The Twitter integration requires OAuth2 authentication.",
)
TEST_CREDENTIALS = OAuth2Credentials(
id="01234567-89ab-cdef-0123-456789abcdef",
provider="twitter",
access_token=SecretStr("mock-twitter-access-token"),
refresh_token=SecretStr("mock-twitter-refresh-token"),
access_token_expires_at=1234567890,
scopes=["tweet.read", "tweet.write", "users.read", "offline.access"],
title="Mock Twitter OAuth2 Credentials",
username="mock-twitter-username",
refresh_token_expires_at=1234567890,
)
TEST_CREDENTIALS_INPUT = {
"provider": TEST_CREDENTIALS.provider,
"id": TEST_CREDENTIALS.id,
"type": TEST_CREDENTIALS.type,
"title": TEST_CREDENTIALS.title,
}
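# (Illustrative sketch, not part of this diff; the block name is hypothetical.)
# A block declares its Twitter credentials input the same way the Nvidia block
# above declares its API-key input, passing any extra scopes it needs on top of
# TwitterOAuthHandler.DEFAULT_SCOPES:
#
#     class ExampleTweetBlock(Block):
#         class Input(BlockSchema):
#             credentials: TwitterCredentialsInput = TwitterCredentialsField(
#                 ["tweet.read", "tweet.write", "users.read", "offline.access"]
#             )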

View File

@ -0,0 +1,418 @@
from datetime import datetime
from typing import Any, Dict
from backend.blocks.twitter._mappers import (
get_backend_expansion,
get_backend_field,
get_backend_list_expansion,
get_backend_list_field,
get_backend_media_field,
get_backend_place_field,
get_backend_poll_field,
get_backend_space_expansion,
get_backend_space_field,
get_backend_user_field,
)
from backend.blocks.twitter._types import ( # DMEventFieldFilter,
DMEventExpansionFilter,
DMEventTypeFilter,
DMMediaFieldFilter,
DMTweetFieldFilter,
ExpansionFilter,
ListExpansionsFilter,
ListFieldsFilter,
SpaceExpansionsFilter,
SpaceFieldsFilter,
TweetFieldsFilter,
TweetMediaFieldsFilter,
TweetPlaceFieldsFilter,
TweetPollFieldsFilter,
TweetReplySettingsFilter,
TweetUserFieldsFilter,
UserExpansionsFilter,
)
# Common Builders
class TweetExpansionsBuilder:
def __init__(self, param: Dict[str, Any]):
self.params: Dict[str, Any] = param
def add_expansions(self, expansions: ExpansionFilter | None):
if expansions:
filtered_expansions = [
name for name, value in expansions.dict().items() if value is True
]
if filtered_expansions:
self.params["expansions"] = ",".join(
[get_backend_expansion(exp) for exp in filtered_expansions]
)
return self
def add_media_fields(self, media_fields: TweetMediaFieldsFilter | None):
if media_fields:
filtered_fields = [
name for name, value in media_fields.dict().items() if value is True
]
if filtered_fields:
self.params["media.fields"] = ",".join(
[get_backend_media_field(field) for field in filtered_fields]
)
return self
def add_place_fields(self, place_fields: TweetPlaceFieldsFilter | None):
if place_fields:
filtered_fields = [
name for name, value in place_fields.dict().items() if value is True
]
if filtered_fields:
self.params["place.fields"] = ",".join(
[get_backend_place_field(field) for field in filtered_fields]
)
return self
def add_poll_fields(self, poll_fields: TweetPollFieldsFilter | None):
if poll_fields:
filtered_fields = [
name for name, value in poll_fields.dict().items() if value is True
]
if filtered_fields:
self.params["poll.fields"] = ",".join(
[get_backend_poll_field(field) for field in filtered_fields]
)
return self
def add_tweet_fields(self, tweet_fields: TweetFieldsFilter | None):
if tweet_fields:
filtered_fields = [
name for name, value in tweet_fields.dict().items() if value is True
]
if filtered_fields:
self.params["tweet.fields"] = ",".join(
[get_backend_field(field) for field in filtered_fields]
)
return self
def add_user_fields(self, user_fields: TweetUserFieldsFilter | None):
if user_fields:
filtered_fields = [
name for name, value in user_fields.dict().items() if value is True
]
if filtered_fields:
self.params["user.fields"] = ",".join(
[get_backend_user_field(field) for field in filtered_fields]
)
return self
def build(self):
return self.params
class UserExpansionsBuilder:
def __init__(self, param: Dict[str, Any]):
self.params: Dict[str, Any] = param
def add_expansions(self, expansions: UserExpansionsFilter | None):
if expansions:
filtered_expansions = [
name for name, value in expansions.dict().items() if value is True
]
if filtered_expansions:
self.params["expansions"] = ",".join(filtered_expansions)
return self
def add_tweet_fields(self, tweet_fields: TweetFieldsFilter | None):
if tweet_fields:
filtered_fields = [
name for name, value in tweet_fields.dict().items() if value is True
]
if filtered_fields:
self.params["tweet.fields"] = ",".join(
[get_backend_field(field) for field in filtered_fields]
)
return self
def add_user_fields(self, user_fields: TweetUserFieldsFilter | None):
if user_fields:
filtered_fields = [
name for name, value in user_fields.dict().items() if value is True
]
if filtered_fields:
self.params["user.fields"] = ",".join(
[get_backend_user_field(field) for field in filtered_fields]
)
return self
def build(self):
return self.params
class ListExpansionsBuilder:
def __init__(self, param: Dict[str, Any]):
self.params: Dict[str, Any] = param
def add_expansions(self, expansions: ListExpansionsFilter | None):
if expansions:
filtered_expansions = [
name for name, value in expansions.dict().items() if value is True
]
if filtered_expansions:
self.params["expansions"] = ",".join(
[get_backend_list_expansion(exp) for exp in filtered_expansions]
)
return self
def add_list_fields(self, list_fields: ListFieldsFilter | None):
if list_fields:
filtered_fields = [
name for name, value in list_fields.dict().items() if value is True
]
if filtered_fields:
self.params["list.fields"] = ",".join(
[get_backend_list_field(field) for field in filtered_fields]
)
return self
def add_user_fields(self, user_fields: TweetUserFieldsFilter | None):
if user_fields:
filtered_fields = [
name for name, value in user_fields.dict().items() if value is True
]
if filtered_fields:
self.params["user.fields"] = ",".join(
[get_backend_user_field(field) for field in filtered_fields]
)
return self
def build(self):
return self.params
class SpaceExpansionsBuilder:
def __init__(self, param: Dict[str, Any]):
self.params: Dict[str, Any] = param
def add_expansions(self, expansions: SpaceExpansionsFilter | None):
if expansions:
filtered_expansions = [
name for name, value in expansions.dict().items() if value is True
]
if filtered_expansions:
self.params["expansions"] = ",".join(
[get_backend_space_expansion(exp) for exp in filtered_expansions]
)
return self
def add_space_fields(self, space_fields: SpaceFieldsFilter | None):
if space_fields:
filtered_fields = [
name for name, value in space_fields.dict().items() if value is True
]
if filtered_fields:
self.params["space.fields"] = ",".join(
[get_backend_space_field(field) for field in filtered_fields]
)
return self
def add_user_fields(self, user_fields: TweetUserFieldsFilter | None):
if user_fields:
filtered_fields = [
name for name, value in user_fields.dict().items() if value is True
]
if filtered_fields:
self.params["user.fields"] = ",".join(
[get_backend_user_field(field) for field in filtered_fields]
)
return self
def build(self):
return self.params
class TweetDurationBuilder:
def __init__(self, param: Dict[str, Any]):
self.params: Dict[str, Any] = param
def add_start_time(self, start_time: datetime | None):
if start_time:
self.params["start_time"] = start_time
return self
def add_end_time(self, end_time: datetime | None):
if end_time:
self.params["end_time"] = end_time
return self
def add_since_id(self, since_id: str | None):
if since_id:
self.params["since_id"] = since_id
return self
def add_until_id(self, until_id: str | None):
if until_id:
self.params["until_id"] = until_id
return self
def add_sort_order(self, sort_order: str | None):
if sort_order:
self.params["sort_order"] = sort_order
return self
def build(self):
return self.params
class DMExpansionsBuilder:
def __init__(self, param: Dict[str, Any]):
self.params: Dict[str, Any] = param
def add_expansions(self, expansions: DMEventExpansionFilter):
if expansions:
filtered_expansions = [
name for name, value in expansions.dict().items() if value is True
]
if filtered_expansions:
self.params["expansions"] = ",".join(filtered_expansions)
return self
def add_event_types(self, event_types: DMEventTypeFilter):
if event_types:
filtered_types = [
name for name, value in event_types.dict().items() if value is True
]
if filtered_types:
self.params["event_types"] = ",".join(filtered_types)
return self
def add_media_fields(self, media_fields: DMMediaFieldFilter):
if media_fields:
filtered_fields = [
name for name, value in media_fields.dict().items() if value is True
]
if filtered_fields:
self.params["media.fields"] = ",".join(filtered_fields)
return self
def add_tweet_fields(self, tweet_fields: DMTweetFieldFilter):
if tweet_fields:
filtered_fields = [
name for name, value in tweet_fields.dict().items() if value is True
]
if filtered_fields:
self.params["tweet.fields"] = ",".join(filtered_fields)
return self
def add_user_fields(self, user_fields: TweetUserFieldsFilter):
if user_fields:
filtered_fields = [
name for name, value in user_fields.dict().items() if value is True
]
if filtered_fields:
self.params["user.fields"] = ",".join(filtered_fields)
return self
def build(self):
return self.params
# Specific Builders
class TweetSearchBuilder:
def __init__(self):
self.params: Dict[str, Any] = {"user_auth": False}
def add_query(self, query: str):
if query:
self.params["query"] = query
return self
def add_pagination(self, max_results: int, pagination: str | None):
if max_results:
self.params["max_results"] = max_results
if pagination:
self.params["pagination_token"] = pagination
return self
def build(self):
return self.params
class TweetPostBuilder:
def __init__(self):
self.params: Dict[str, Any] = {"user_auth": False}
def add_text(self, text: str | None):
if text:
self.params["text"] = text
return self
def add_media(self, media_ids: list, tagged_user_ids: list):
if media_ids:
self.params["media_ids"] = media_ids
if tagged_user_ids:
self.params["media_tagged_user_ids"] = tagged_user_ids
return self
def add_deep_link(self, link: str):
if link:
self.params["direct_message_deep_link"] = link
return self
def add_super_followers(self, for_super_followers: bool):
if for_super_followers:
self.params["for_super_followers_only"] = for_super_followers
return self
def add_place(self, place_id: str):
if place_id:
self.params["place_id"] = place_id
return self
def add_poll_options(self, poll_options: list):
if poll_options:
self.params["poll_options"] = poll_options
return self
def add_poll_duration(self, poll_duration_minutes: int):
if poll_duration_minutes:
self.params["poll_duration_minutes"] = poll_duration_minutes
return self
def add_quote(self, quote_id: str):
if quote_id:
self.params["quote_tweet_id"] = quote_id
return self
def add_reply_settings(
self,
exclude_user_ids: list,
reply_to_id: str,
settings: TweetReplySettingsFilter,
):
if exclude_user_ids:
self.params["exclude_reply_user_ids"] = exclude_user_ids
if reply_to_id:
self.params["in_reply_to_tweet_id"] = reply_to_id
if settings.All_Users:
self.params["reply_settings"] = None
elif settings.Following_Users_Only:
self.params["reply_settings"] = "following"
elif settings.Mentioned_Users_Only:
self.params["reply_settings"] = "mentionedUsers"
return self
def build(self):
return self.params
class TweetGetsBuilder:
def __init__(self):
self.params: Dict[str, Any] = {"user_auth": False}
def add_id(self, tweet_id: list[str]):
self.params["id"] = tweet_id
return self
def build(self):
return self.params
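# Illustrative usage sketch (not part of the module), assuming tweepy's Client API:
# each builder is chained and finished with .build() to produce the kwargs dict that
# is later passed to a tweepy.Client call. The query and limits below are made up.
#
# params = (
#     TweetSearchBuilder()
#     .add_query("from:TwitterDev -is:retweet")
#     .add_pagination(max_results=10, pagination=None)
#     .build()
# )
# # params == {"user_auth": False, "query": "from:TwitterDev -is:retweet", "max_results": 10}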

View File

@ -0,0 +1,234 @@
# -------------- Tweets -----------------
# Tweet Expansions
EXPANSION_FRONTEND_TO_BACKEND_MAPPING = {
"Poll_IDs": "attachments.poll_ids",
"Media_Keys": "attachments.media_keys",
"Author_User_ID": "author_id",
"Edit_History_Tweet_IDs": "edit_history_tweet_ids",
"Mentioned_Usernames": "entities.mentions.username",
"Place_ID": "geo.place_id",
"Reply_To_User_ID": "in_reply_to_user_id",
"Referenced_Tweet_ID": "referenced_tweets.id",
"Referenced_Tweet_Author_ID": "referenced_tweets.id.author_id",
}
def get_backend_expansion(frontend_key: str) -> str:
result = EXPANSION_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
if result is None:
raise KeyError(f"Invalid expansion key: {frontend_key}")
return result
# TweetReplySettings
REPLY_SETTINGS_FRONTEND_TO_BACKEND_MAPPING = {
"Mentioned_Users_Only": "mentionedUsers",
"Following_Users_Only": "following",
"All_Users": "all",
}
def get_backend_reply_setting(frontend_key: str) -> str:
result = REPLY_SETTINGS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
if result is None:
raise KeyError(f"Invalid reply setting key: {frontend_key}")
return result
# TweetUserFields
USER_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
"Account_Creation_Date": "created_at",
"User_Bio": "description",
"User_Entities": "entities",
"User_ID": "id",
"User_Location": "location",
"Latest_Tweet_ID": "most_recent_tweet_id",
"Display_Name": "name",
"Pinned_Tweet_ID": "pinned_tweet_id",
"Profile_Picture_URL": "profile_image_url",
"Is_Protected_Account": "protected",
"Account_Statistics": "public_metrics",
"Profile_URL": "url",
"Username": "username",
"Is_Verified": "verified",
"Verification_Type": "verified_type",
"Content_Withholding_Info": "withheld",
}
def get_backend_user_field(frontend_key: str) -> str:
result = USER_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
if result is None:
raise KeyError(f"Invalid user field key: {frontend_key}")
return result
# TweetFields
FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
"Tweet_Attachments": "attachments",
"Author_ID": "author_id",
"Context_Annotations": "context_annotations",
"Conversation_ID": "conversation_id",
"Creation_Time": "created_at",
"Edit_Controls": "edit_controls",
"Tweet_Entities": "entities",
"Geographic_Location": "geo",
"Tweet_ID": "id",
"Reply_To_User_ID": "in_reply_to_user_id",
"Language": "lang",
"Public_Metrics": "public_metrics",
"Sensitive_Content_Flag": "possibly_sensitive",
"Referenced_Tweets": "referenced_tweets",
"Reply_Settings": "reply_settings",
"Tweet_Source": "source",
"Tweet_Text": "text",
"Withheld_Content": "withheld",
}
def get_backend_field(frontend_key: str) -> str:
result = FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
if result is None:
raise KeyError(f"Invalid field key: {frontend_key}")
return result
# TweetPollFields
POLL_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
"Duration_Minutes": "duration_minutes",
"End_DateTime": "end_datetime",
"Poll_ID": "id",
"Poll_Options": "options",
"Voting_Status": "voting_status",
}
def get_backend_poll_field(frontend_key: str) -> str:
result = POLL_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
if result is None:
raise KeyError(f"Invalid poll field key: {frontend_key}")
return result
PLACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
"Contained_Within_Places": "contained_within",
"Country": "country",
"Country_Code": "country_code",
"Full_Location_Name": "full_name",
"Geographic_Coordinates": "geo",
"Place_ID": "id",
"Place_Name": "name",
"Place_Type": "place_type",
}
def get_backend_place_field(frontend_key: str) -> str:
result = PLACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
if result is None:
raise KeyError(f"Invalid place field key: {frontend_key}")
return result
# TweetMediaFields
MEDIA_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
"Duration_in_Milliseconds": "duration_ms",
"Height": "height",
"Media_Key": "media_key",
"Preview_Image_URL": "preview_image_url",
"Media_Type": "type",
"Media_URL": "url",
"Width": "width",
"Public_Metrics": "public_metrics",
"Non_Public_Metrics": "non_public_metrics",
"Organic_Metrics": "organic_metrics",
"Promoted_Metrics": "promoted_metrics",
"Alternative_Text": "alt_text",
"Media_Variants": "variants",
}
def get_backend_media_field(frontend_key: str) -> str:
result = MEDIA_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
if result is None:
raise KeyError(f"Invalid media field key: {frontend_key}")
return result
# -------------- Spaces -----------------
# SpaceExpansions
EXPANSION_FRONTEND_TO_BACKEND_MAPPING_SPACE = {
"Invited_Users": "invited_user_ids",
"Speakers": "speaker_ids",
"Creator": "creator_id",
"Hosts": "host_ids",
"Topics": "topic_ids",
}
def get_backend_space_expansion(frontend_key: str) -> str:
result = EXPANSION_FRONTEND_TO_BACKEND_MAPPING_SPACE.get(frontend_key)
if result is None:
raise KeyError(f"Invalid expansion key: {frontend_key}")
return result
# SpaceFields
SPACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
"Space_ID": "id",
"Space_State": "state",
"Creation_Time": "created_at",
"End_Time": "ended_at",
"Host_User_IDs": "host_ids",
"Language": "lang",
"Is_Ticketed": "is_ticketed",
"Invited_User_IDs": "invited_user_ids",
"Participant_Count": "participant_count",
"Subscriber_Count": "subscriber_count",
"Scheduled_Start_Time": "scheduled_start",
"Speaker_User_IDs": "speaker_ids",
"Start_Time": "started_at",
"Space_Title": "title",
"Topic_IDs": "topic_ids",
"Last_Updated_Time": "updated_at",
}
def get_backend_space_field(frontend_key: str) -> str:
result = SPACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
if result is None:
raise KeyError(f"Invalid space field key: {frontend_key}")
return result
# -------------- List Expansions -----------------
# ListExpansions
LIST_EXPANSION_FRONTEND_TO_BACKEND_MAPPING = {"List_Owner_ID": "owner_id"}
def get_backend_list_expansion(frontend_key: str) -> str:
result = LIST_EXPANSION_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
if result is None:
raise KeyError(f"Invalid list expansion key: {frontend_key}")
return result
LIST_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
"List_ID": "id",
"List_Name": "name",
"Creation_Date": "created_at",
"Description": "description",
"Follower_Count": "follower_count",
"Member_Count": "member_count",
"Is_Private": "private",
"Owner_ID": "owner_id",
}
def get_backend_list_field(frontend_key: str) -> str:
result = LIST_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
if result is None:
raise KeyError(f"Invalid list field key: {frontend_key}")
return result
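# Illustrative usage sketch (not part of the module): each mapping translates the
# human-readable frontend key into the field name the Twitter API expects, and the
# lookup helpers raise KeyError for unknown keys. The keys below come from the
# mappings above; the last one is a made-up example of an invalid key.
#
# get_backend_expansion("Author_User_ID")   # -> "author_id"
# get_backend_user_field("Display_Name")    # -> "name"
# get_backend_list_field("Follower_Count")  # -> "follower_count"
# get_backend_expansion("Not_A_Real_Key")   # raises KeyError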

View File

@ -0,0 +1,76 @@
from typing import Any, Dict, List
class BaseSerializer:
@staticmethod
def _serialize_value(value: Any) -> Any:
"""Helper method to serialize individual values"""
if hasattr(value, "data"):
return value.data
return value
class IncludesSerializer(BaseSerializer):
@classmethod
def serialize(cls, includes: Dict[str, Any]) -> Dict[str, Any]:
"""Serializes the includes dictionary"""
if not includes:
return {}
serialized_includes = {}
for key, value in includes.items():
if isinstance(value, list):
serialized_includes[key] = [
cls._serialize_value(item) for item in value
]
else:
serialized_includes[key] = cls._serialize_value(value)
return serialized_includes
class ResponseDataSerializer(BaseSerializer):
@classmethod
def serialize_dict(cls, item: Dict[str, Any]) -> Dict[str, Any]:
"""Serializes a single dictionary item"""
serialized_item = {}
if hasattr(item, "__dict__"):
items = item.__dict__.items()
else:
items = item.items()
for key, value in items:
if isinstance(value, list):
serialized_item[key] = [
cls._serialize_value(sub_item) for sub_item in value
]
else:
serialized_item[key] = cls._serialize_value(value)
return serialized_item
@classmethod
def serialize_list(cls, data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
"""Serializes a list of dictionary items"""
return [cls.serialize_dict(item) for item in data]
class ResponseSerializer:
@classmethod
def serialize(cls, response) -> Dict[str, Any]:
"""Main serializer that handles both data and includes"""
result = {"data": None, "included": {}}
# Handle response.data
if response.data:
if isinstance(response.data, list):
result["data"] = ResponseDataSerializer.serialize_list(response.data)
else:
result["data"] = ResponseDataSerializer.serialize_dict(response.data)
# Handle includes
if hasattr(response, "includes") and response.includes:
result["included"] = IncludesSerializer.serialize(response.includes)
return result
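# Illustrative usage sketch (not part of the module), assuming a tweepy.Client response
# object; the bearer token and tweet ID below are placeholders.
#
# import tweepy
# client = tweepy.Client(bearer_token="<token>")
# response = client.get_tweet("1460323737035677698", user_auth=False)
# serialized = ResponseSerializer.serialize(response)
# serialized["data"]      # tweet fields as a plain dict
# serialized["included"]  # expansion objects keyed by type (when expansions were requested)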

View File

@ -0,0 +1,443 @@
from datetime import datetime
from enum import Enum
from pydantic import BaseModel
from backend.data.block import BlockSchema
from backend.data.model import SchemaField
# -------------- Tweets -----------------
class TweetReplySettingsFilter(BaseModel):
Mentioned_Users_Only: bool = False
Following_Users_Only: bool = False
All_Users: bool = False
class TweetUserFieldsFilter(BaseModel):
Account_Creation_Date: bool = False
User_Bio: bool = False
User_Entities: bool = False
User_ID: bool = False
User_Location: bool = False
Latest_Tweet_ID: bool = False
Display_Name: bool = False
Pinned_Tweet_ID: bool = False
Profile_Picture_URL: bool = False
Is_Protected_Account: bool = False
Account_Statistics: bool = False
Profile_URL: bool = False
Username: bool = False
Is_Verified: bool = False
Verification_Type: bool = False
Content_Withholding_Info: bool = False
class TweetFieldsFilter(BaseModel):
Tweet_Attachments: bool = False
Author_ID: bool = False
Context_Annotations: bool = False
Conversation_ID: bool = False
Creation_Time: bool = False
Edit_Controls: bool = False
Tweet_Entities: bool = False
Geographic_Location: bool = False
Tweet_ID: bool = False
Reply_To_User_ID: bool = False
Language: bool = False
Public_Metrics: bool = False
Sensitive_Content_Flag: bool = False
Referenced_Tweets: bool = False
Reply_Settings: bool = False
Tweet_Source: bool = False
Tweet_Text: bool = False
Withheld_Content: bool = False
class PersonalTweetFieldsFilter(BaseModel):
attachments: bool = False
author_id: bool = False
context_annotations: bool = False
conversation_id: bool = False
created_at: bool = False
edit_controls: bool = False
entities: bool = False
geo: bool = False
id: bool = False
in_reply_to_user_id: bool = False
lang: bool = False
non_public_metrics: bool = False
public_metrics: bool = False
organic_metrics: bool = False
promoted_metrics: bool = False
possibly_sensitive: bool = False
referenced_tweets: bool = False
reply_settings: bool = False
source: bool = False
text: bool = False
withheld: bool = False
class TweetPollFieldsFilter(BaseModel):
Duration_Minutes: bool = False
End_DateTime: bool = False
Poll_ID: bool = False
Poll_Options: bool = False
Voting_Status: bool = False
class TweetPlaceFieldsFilter(BaseModel):
Contained_Within_Places: bool = False
Country: bool = False
Country_Code: bool = False
Full_Location_Name: bool = False
Geographic_Coordinates: bool = False
Place_ID: bool = False
Place_Name: bool = False
Place_Type: bool = False
class TweetMediaFieldsFilter(BaseModel):
Duration_in_Milliseconds: bool = False
Height: bool = False
Media_Key: bool = False
Preview_Image_URL: bool = False
Media_Type: bool = False
Media_URL: bool = False
Width: bool = False
Public_Metrics: bool = False
Non_Public_Metrics: bool = False
Organic_Metrics: bool = False
Promoted_Metrics: bool = False
Alternative_Text: bool = False
Media_Variants: bool = False
class ExpansionFilter(BaseModel):
Poll_IDs: bool = False
Media_Keys: bool = False
Author_User_ID: bool = False
Edit_History_Tweet_IDs: bool = False
Mentioned_Usernames: bool = False
Place_ID: bool = False
Reply_To_User_ID: bool = False
Referenced_Tweet_ID: bool = False
Referenced_Tweet_Author_ID: bool = False
class TweetExcludesFilter(BaseModel):
retweets: bool = False
replies: bool = False
# -------------- Users -----------------
class UserExpansionsFilter(BaseModel):
pinned_tweet_id: bool = False
# -------------- DMs -----------------
class DMEventFieldFilter(BaseModel):
id: bool = False
text: bool = False
event_type: bool = False
created_at: bool = False
dm_conversation_id: bool = False
sender_id: bool = False
participant_ids: bool = False
referenced_tweets: bool = False
attachments: bool = False
class DMEventTypeFilter(BaseModel):
MessageCreate: bool = False
ParticipantsJoin: bool = False
ParticipantsLeave: bool = False
class DMEventExpansionFilter(BaseModel):
attachments_media_keys: bool = False
referenced_tweets_id: bool = False
sender_id: bool = False
participant_ids: bool = False
class DMMediaFieldFilter(BaseModel):
duration_ms: bool = False
height: bool = False
media_key: bool = False
preview_image_url: bool = False
type: bool = False
url: bool = False
width: bool = False
public_metrics: bool = False
alt_text: bool = False
variants: bool = False
class DMTweetFieldFilter(BaseModel):
attachments: bool = False
author_id: bool = False
context_annotations: bool = False
conversation_id: bool = False
created_at: bool = False
edit_controls: bool = False
entities: bool = False
geo: bool = False
id: bool = False
in_reply_to_user_id: bool = False
lang: bool = False
public_metrics: bool = False
possibly_sensitive: bool = False
referenced_tweets: bool = False
reply_settings: bool = False
source: bool = False
text: bool = False
withheld: bool = False
# -------------- Spaces -----------------
class SpaceExpansionsFilter(BaseModel):
Invited_Users: bool = False
Speakers: bool = False
Creator: bool = False
Hosts: bool = False
Topics: bool = False
class SpaceFieldsFilter(BaseModel):
Space_ID: bool = False
Space_State: bool = False
Creation_Time: bool = False
End_Time: bool = False
Host_User_IDs: bool = False
Language: bool = False
Is_Ticketed: bool = False
Invited_User_IDs: bool = False
Participant_Count: bool = False
Subscriber_Count: bool = False
Scheduled_Start_Time: bool = False
Speaker_User_IDs: bool = False
Start_Time: bool = False
Space_Title: bool = False
Topic_IDs: bool = False
Last_Updated_Time: bool = False
class SpaceStatesFilter(str, Enum):
live = "live"
scheduled = "scheduled"
all = "all"
# -------------- List Expansions -----------------
class ListExpansionsFilter(BaseModel):
List_Owner_ID: bool = False
class ListFieldsFilter(BaseModel):
List_ID: bool = False
List_Name: bool = False
Creation_Date: bool = False
Description: bool = False
Follower_Count: bool = False
Member_Count: bool = False
Is_Private: bool = False
Owner_ID: bool = False
# --------- [Input Types] -------------
class TweetExpansionInputs(BlockSchema):
expansions: ExpansionFilter | None = SchemaField(
description="Choose what extra information you want to get with your tweets. For example:\n- Select 'Media_Keys' to get media details\n- Select 'Author_User_ID' to get user information\n- Select 'Place_ID' to get location details",
placeholder="Pick the extra information you want to see",
default=None,
advanced=True,
)
media_fields: TweetMediaFieldsFilter | None = SchemaField(
description="Select what media information you want to see (images, videos, etc). To use this, you must first select 'Media_Keys' in the expansions above.",
placeholder="Choose what media details you want to see",
default=None,
advanced=True,
)
place_fields: TweetPlaceFieldsFilter | None = SchemaField(
description="Select what location information you want to see (country, coordinates, etc). To use this, you must first select 'Place_ID' in the expansions above.",
placeholder="Choose what location details you want to see",
default=None,
advanced=True,
)
poll_fields: TweetPollFieldsFilter | None = SchemaField(
description="Select what poll information you want to see (options, voting status, etc). To use this, you must first select 'Poll_IDs' in the expansions above.",
placeholder="Choose what poll details you want to see",
default=None,
advanced=True,
)
tweet_fields: TweetFieldsFilter | None = SchemaField(
description="Select what tweet information you want to see. For referenced tweets (like retweets), select 'Referenced_Tweet_ID' in the expansions above.",
placeholder="Choose what tweet details you want to see",
default=None,
advanced=True,
)
user_fields: TweetUserFieldsFilter | None = SchemaField(
description="Select what user information you want to see. To use this, you must first select one of these in expansions above:\n- 'Author_User_ID' for tweet authors\n- 'Mentioned_Usernames' for mentioned users\n- 'Reply_To_User_ID' for users being replied to\n- 'Referenced_Tweet_Author_ID' for authors of referenced tweets",
placeholder="Choose what user details you want to see",
default=None,
advanced=True,
)
class DMEventExpansionInputs(BlockSchema):
expansions: DMEventExpansionFilter | None = SchemaField(
description="Select expansions to include related data objects in the 'includes' section.",
placeholder="Enter expansions",
default=None,
advanced=True,
)
event_types: DMEventTypeFilter | None = SchemaField(
description="Select DM event types to include in the response.",
placeholder="Enter event types",
default=None,
advanced=True,
)
media_fields: DMMediaFieldFilter | None = SchemaField(
description="Select media fields to include in the response (requires expansions=attachments.media_keys).",
placeholder="Enter media fields",
default=None,
advanced=True,
)
tweet_fields: DMTweetFieldFilter | None = SchemaField(
description="Select tweet fields to include in the response (requires expansions=referenced_tweets.id).",
placeholder="Enter tweet fields",
default=None,
advanced=True,
)
user_fields: TweetUserFieldsFilter | None = SchemaField(
description="Select user fields to include in the response (requires expansions=sender_id or participant_ids).",
placeholder="Enter user fields",
default=None,
advanced=True,
)
class UserExpansionInputs(BlockSchema):
expansions: UserExpansionsFilter | None = SchemaField(
description="Choose what extra information you want to get with user data. Currently only 'pinned_tweet_id' is available to see a user's pinned tweet.",
placeholder="Select extra user information to include",
default=None,
advanced=True,
)
tweet_fields: TweetFieldsFilter | None = SchemaField(
description="Select what tweet information you want to see in pinned tweets. This only works if you select 'pinned_tweet_id' in expansions above.",
placeholder="Choose what details to see in pinned tweets",
default=None,
advanced=True,
)
user_fields: TweetUserFieldsFilter | None = SchemaField(
description="Select what user information you want to see, like username, bio, profile picture, etc.",
placeholder="Choose what user details you want to see",
default=None,
advanced=True,
)
class SpaceExpansionInputs(BlockSchema):
expansions: SpaceExpansionsFilter | None = SchemaField(
description="Choose additional information you want to get with your Twitter Spaces:\n- Select 'Invited_Users' to see who was invited\n- Select 'Speakers' to see who can speak\n- Select 'Creator' to get details about who made the Space\n- Select 'Hosts' to see who's hosting\n- Select 'Topics' to see Space topics",
placeholder="Pick what extra information you want to see about the Space",
default=None,
advanced=True,
)
space_fields: SpaceFieldsFilter | None = SchemaField(
description="Choose what Space details you want to see, such as:\n- Title\n- Start/End times\n- Number of participants\n- Language\n- State (live/scheduled)\n- And more",
placeholder="Choose what Space information you want to get",
default=SpaceFieldsFilter(Space_Title=True, Host_User_IDs=True),
advanced=True,
)
user_fields: TweetUserFieldsFilter | None = SchemaField(
description="Choose what user information you want to see. This works when you select any of these in expansions above:\n- 'Creator' for Space creator details\n- 'Hosts' for host information\n- 'Speakers' for speaker details\n- 'Invited_Users' for invited user information",
placeholder="Pick what details you want to see about the users",
default=None,
advanced=True,
)
class ListExpansionInputs(BlockSchema):
expansions: ListExpansionsFilter | None = SchemaField(
description="Choose what extra information you want to get with your Twitter Lists:\n- Select 'List_Owner_ID' to get details about who owns the list\n\nThis will let you see more details about the list owner when you also select user fields below.",
placeholder="Pick what extra list information you want to see",
default=ListExpansionsFilter(List_Owner_ID=True),
advanced=True,
)
user_fields: TweetUserFieldsFilter | None = SchemaField(
description="Choose what information you want to see about list owners. This only works when you select 'List_Owner_ID' in expansions above.\n\nYou can see things like:\n- Their username\n- Profile picture\n- Account details\n- And more",
placeholder="Select what details you want to see about list owners",
default=TweetUserFieldsFilter(User_ID=True, Username=True),
advanced=True,
)
list_fields: ListFieldsFilter | None = SchemaField(
description="Choose what information you want to see about the Twitter Lists themselves, such as:\n- List name\n- Description\n- Number of followers\n- Number of members\n- Whether it's private\n- Creation date\n- And more",
placeholder="Pick what list details you want to see",
default=ListFieldsFilter(Owner_ID=True),
advanced=True,
)
class TweetTimeWindowInputs(BlockSchema):
start_time: datetime | None = SchemaField(
description="Start time in YYYY-MM-DDTHH:mm:ssZ format",
placeholder="Enter start time",
default=None,
advanced=False,
)
end_time: datetime | None = SchemaField(
description="End time in YYYY-MM-DDTHH:mm:ssZ format",
placeholder="Enter end time",
default=None,
advanced=False,
)
since_id: str | None = SchemaField(
description="Returns results with Tweet ID greater than this (more recent than), we give priority to since_id over start_time",
placeholder="Enter since ID",
default=None,
advanced=True,
)
until_id: str | None = SchemaField(
description="Returns results with Tweet ID less than this (that is, older than), and used with since_id",
placeholder="Enter until ID",
default=None,
advanced=True,
)
sort_order: str | None = SchemaField(
description="Order of returned tweets (recency or relevancy)",
placeholder="Enter sort order",
default=None,
advanced=True,
)
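# Illustrative usage sketch (not part of the module): the filter models above are plain
# pydantic schemas of booleans; the builders in _builders.py read them via .dict() and
# keep only the flags set to True. The list ID below is a made-up example.
#
# fields = ListFieldsFilter(List_Name=True, Follower_Count=True)
# params = ListExpansionsBuilder({"id": "84839422"}).add_list_fields(fields).build()
# # params == {"id": "84839422", "list.fields": "name,follower_count"}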

View File

@ -0,0 +1,201 @@
# TODO: Add new type support
# from typing import cast
# import tweepy
# from tweepy.client import Response
# from backend.blocks.twitter._serializer import IncludesSerializer, ResponseDataSerializer
# from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
# from backend.data.model import SchemaField
# from backend.blocks.twitter._builders import DMExpansionsBuilder
# from backend.blocks.twitter._types import DMEventExpansion, DMEventExpansionInputs, DMEventType, DMMediaField, DMTweetField, TweetUserFields
# from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
# from backend.blocks.twitter._auth import (
# TEST_CREDENTIALS,
# TEST_CREDENTIALS_INPUT,
# TwitterCredentials,
# TwitterCredentialsField,
# TwitterCredentialsInput,
# )
# Requires Pro or Enterprise plan [Manual Testing Required]
# class TwitterGetDMEventsBlock(Block):
# """
# Gets a list of Direct Message events for the authenticated user
# """
# class Input(DMEventExpansionInputs):
# credentials: TwitterCredentialsInput = TwitterCredentialsField(
# ["dm.read", "offline.access", "user.read", "tweet.read"]
# )
# dm_conversation_id: str = SchemaField(
# description="The ID of the Direct Message conversation",
# placeholder="Enter conversation ID",
# required=True
# )
# max_results: int = SchemaField(
# description="Maximum number of results to return (1-100)",
# placeholder="Enter max results",
# advanced=True,
# default=10,
# )
# pagination_token: str = SchemaField(
# description="Token for pagination",
# placeholder="Enter pagination token",
# advanced=True,
# default=""
# )
# class Output(BlockSchema):
# # Common outputs
# event_ids: list[str] = SchemaField(description="DM Event IDs")
# event_texts: list[str] = SchemaField(description="DM Event text contents")
# event_types: list[str] = SchemaField(description="Types of DM events")
# next_token: str = SchemaField(description="Token for next page of results")
# # Complete outputs
# data: list[dict] = SchemaField(description="Complete DM events data")
# included: dict = SchemaField(description="Additional data requested via expansions")
# meta: dict = SchemaField(description="Metadata about the response")
# error: str = SchemaField(description="Error message if request failed")
# def __init__(self):
# super().__init__(
# id="dc37a6d4-a62e-11ef-a3a5-03061375737b",
# description="This block retrieves Direct Message events for the authenticated user.",
# categories={BlockCategory.SOCIAL},
# input_schema=TwitterGetDMEventsBlock.Input,
# output_schema=TwitterGetDMEventsBlock.Output,
# test_input={
# "dm_conversation_id": "1234567890",
# "max_results": 10,
# "credentials": TEST_CREDENTIALS_INPUT,
# "expansions": [],
# "event_types": [],
# "media_fields": [],
# "tweet_fields": [],
# "user_fields": []
# },
# test_credentials=TEST_CREDENTIALS,
# test_output=[
# ("event_ids", ["1346889436626259968"]),
# ("event_texts", ["Hello just you..."]),
# ("event_types", ["MessageCreate"]),
# ("next_token", None),
# ("data", [{"id": "1346889436626259968", "text": "Hello just you...", "event_type": "MessageCreate"}]),
# ("included", {}),
# ("meta", {}),
# ("error", "")
# ],
# test_mock={
# "get_dm_events": lambda *args, **kwargs: (
# [{"id": "1346889436626259968", "text": "Hello just you...", "event_type": "MessageCreate"}],
# {},
# {},
# ["1346889436626259968"],
# ["Hello just you..."],
# ["MessageCreate"],
# None
# )
# }
# )
# @staticmethod
# def get_dm_events(
# credentials: TwitterCredentials,
# dm_conversation_id: str,
# max_results: int,
# pagination_token: str,
# expansions: list[DMEventExpansion],
# event_types: list[DMEventType],
# media_fields: list[DMMediaField],
# tweet_fields: list[DMTweetField],
# user_fields: list[TweetUserFields]
# ):
# try:
# client = tweepy.Client(
# bearer_token=credentials.access_token.get_secret_value()
# )
# params = {
# "dm_conversation_id": dm_conversation_id,
# "max_results": max_results,
# "pagination_token": None if pagination_token == "" else pagination_token,
# "user_auth": False
# }
# params = (DMExpansionsBuilder(params)
# .add_expansions(expansions)
# .add_event_types(event_types)
# .add_media_fields(media_fields)
# .add_tweet_fields(tweet_fields)
# .add_user_fields(user_fields)
# .build())
# response = cast(Response, client.get_direct_message_events(**params))
# meta = {}
# event_ids = []
# event_texts = []
# event_types = []
# next_token = None
# if response.meta:
# meta = response.meta
# next_token = meta.get("next_token")
# included = IncludesSerializer.serialize(response.includes)
# data = ResponseDataSerializer.serialize_list(response.data)
# if response.data:
# event_ids = [str(item.id) for item in response.data]
# event_texts = [item.text if hasattr(item, "text") else None for item in response.data]
# event_types = [item.event_type for item in response.data]
# return data, included, meta, event_ids, event_texts, event_types, next_token
# raise Exception("No DM events found")
# except tweepy.TweepyException:
# raise
# def run(
# self,
# input_data: Input,
# *,
# credentials: TwitterCredentials,
# **kwargs,
# ) -> BlockOutput:
# try:
# event_data, included, meta, event_ids, event_texts, event_types, next_token = self.get_dm_events(
# credentials,
# input_data.dm_conversation_id,
# input_data.max_results,
# input_data.pagination_token,
# input_data.expansions,
# input_data.event_types,
# input_data.media_fields,
# input_data.tweet_fields,
# input_data.user_fields
# )
# if event_ids:
# yield "event_ids", event_ids
# if event_texts:
# yield "event_texts", event_texts
# if event_types:
# yield "event_types", event_types
# if next_token:
# yield "next_token", next_token
# if event_data:
# yield "data", event_data
# if included:
# yield "included", included
# if meta:
# yield "meta", meta
# except Exception as e:
# yield "error", handle_tweepy_exception(e)

View File

@ -0,0 +1,260 @@
# TODO: Add new type support
# from typing import cast
# import tweepy
# from tweepy.client import Response
# from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
# from backend.data.model import SchemaField
# from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
# from backend.blocks.twitter._auth import (
# TEST_CREDENTIALS,
# TEST_CREDENTIALS_INPUT,
# TwitterCredentials,
# TwitterCredentialsField,
# TwitterCredentialsInput,
# )
# Requires Pro or Enterprise plan [Manual Testing Required]
# class TwitterSendDirectMessageBlock(Block):
# """
# Sends a direct message to a Twitter user
# """
# class Input(BlockSchema):
# credentials: TwitterCredentialsInput = TwitterCredentialsField(
# ["offline.access", "direct_messages.write"]
# )
# participant_id: str = SchemaField(
# description="The User ID of the account to send DM to",
# placeholder="Enter recipient user ID",
# default="",
# advanced=False
# )
# dm_conversation_id: str = SchemaField(
# description="The conversation ID to send message to",
# placeholder="Enter conversation ID",
# default="",
# advanced=False
# )
# text: str = SchemaField(
# description="Text of the Direct Message (up to 10,000 characters)",
# placeholder="Enter message text",
# default="",
# advanced=False
# )
# media_id: str = SchemaField(
# description="Media ID to attach to the message",
# placeholder="Enter media ID",
# default=""
# )
# class Output(BlockSchema):
# dm_event_id: str = SchemaField(description="ID of the sent direct message")
# dm_conversation_id_: str = SchemaField(description="ID of the conversation")
# error: str = SchemaField(description="Error message if sending failed")
# def __init__(self):
# super().__init__(
# id="f32f2786-a62e-11ef-a93d-a3ef199dde7f",
# description="This block sends a direct message to a specified Twitter user.",
# categories={BlockCategory.SOCIAL},
# input_schema=TwitterSendDirectMessageBlock.Input,
# output_schema=TwitterSendDirectMessageBlock.Output,
# test_input={
# "participant_id": "783214",
# "dm_conversation_id": "",
# "text": "Hello from Twitter API",
# "media_id": "",
# "credentials": TEST_CREDENTIALS_INPUT
# },
# test_credentials=TEST_CREDENTIALS,
# test_output=[
# ("dm_event_id", "0987654321"),
# ("dm_conversation_id_", "1234567890"),
# ("error", "")
# ],
# test_mock={
# "send_direct_message": lambda *args, **kwargs: (
# "0987654321",
# "1234567890"
# )
# },
# )
# @staticmethod
# def send_direct_message(
# credentials: TwitterCredentials,
# participant_id: str,
# dm_conversation_id: str,
# text: str,
# media_id: str
# ):
# try:
# client = tweepy.Client(
# bearer_token=credentials.access_token.get_secret_value()
# )
# response = cast(
# Response,
# client.create_direct_message(
# participant_id=None if participant_id == "" else participant_id,
# dm_conversation_id=None if dm_conversation_id == "" else dm_conversation_id,
# text=None if text == "" else text,
# media_id=None if media_id == "" else media_id,
# user_auth=False
# )
# )
# if not response.data:
# raise Exception("Failed to send direct message")
# return response.data["dm_event_id"], response.data["dm_conversation_id"]
# except tweepy.TweepyException:
# raise
# except Exception as e:
# print(f"Unexpected error: {str(e)}")
# raise
# def run(
# self,
# input_data: Input,
# *,
# credentials: TwitterCredentials,
# **kwargs,
# ) -> BlockOutput:
# try:
# dm_event_id, dm_conversation_id = self.send_direct_message(
# credentials,
# input_data.participant_id,
# input_data.dm_conversation_id,
# input_data.text,
# input_data.media_id
# )
# yield "dm_event_id", dm_event_id
# yield "dm_conversation_id", dm_conversation_id
# except Exception as e:
# yield "error", handle_tweepy_exception(e)
# class TwitterCreateDMConversationBlock(Block):
# """
# Creates a new group direct message conversation on Twitter
# """
# class Input(BlockSchema):
# credentials: TwitterCredentialsInput = TwitterCredentialsField(
# ["offline.access", "dm.write","dm.read","tweet.read","user.read"]
# )
# participant_ids: list[str] = SchemaField(
# description="Array of User IDs to create conversation with (max 50)",
# placeholder="Enter participant user IDs",
# default=[],
# advanced=False
# )
# text: str = SchemaField(
# description="Text of the Direct Message (up to 10,000 characters)",
# placeholder="Enter message text",
# default="",
# advanced=False
# )
# media_id: str = SchemaField(
# description="Media ID to attach to the message",
# placeholder="Enter media ID",
# default="",
# advanced=False
# )
# class Output(BlockSchema):
# dm_event_id: str = SchemaField(description="ID of the sent direct message")
# dm_conversation_id: str = SchemaField(description="ID of the conversation")
# error: str = SchemaField(description="Error message if sending failed")
# def __init__(self):
# super().__init__(
# id="ec11cabc-a62e-11ef-8c0e-3fe37ba2ec92",
# description="This block creates a new group DM conversation with specified Twitter users.",
# categories={BlockCategory.SOCIAL},
# input_schema=TwitterCreateDMConversationBlock.Input,
# output_schema=TwitterCreateDMConversationBlock.Output,
# test_input={
# "participant_ids": ["783214", "2244994945"],
# "text": "Hello from Twitter API",
# "media_id": "",
# "credentials": TEST_CREDENTIALS_INPUT
# },
# test_credentials=TEST_CREDENTIALS,
# test_output=[
# ("dm_event_id", "0987654321"),
# ("dm_conversation_id", "1234567890"),
# ("error", "")
# ],
# test_mock={
# "create_dm_conversation": lambda *args, **kwargs: (
# "0987654321",
# "1234567890"
# )
# },
# )
# @staticmethod
# def create_dm_conversation(
# credentials: TwitterCredentials,
# participant_ids: list[str],
# text: str,
# media_id: str
# ):
# try:
# client = tweepy.Client(
# bearer_token=credentials.access_token.get_secret_value()
# )
# response = cast(
# Response,
# client.create_direct_message_conversation(
# participant_ids=participant_ids,
# text=None if text == "" else text,
# media_id=None if media_id == "" else media_id,
# user_auth=False
# )
# )
# if not response.data:
# raise Exception("Failed to create DM conversation")
# return response.data["dm_event_id"], response.data["dm_conversation_id"]
# except tweepy.TweepyException:
# raise
# except Exception as e:
# print(f"Unexpected error: {str(e)}")
# raise
# def run(
# self,
# input_data: Input,
# *,
# credentials: TwitterCredentials,
# **kwargs,
# ) -> BlockOutput:
# try:
# dm_event_id, dm_conversation_id = self.create_dm_conversation(
# credentials,
# input_data.participant_ids,
# input_data.text,
# input_data.media_id
# )
# yield "dm_event_id", dm_event_id
# yield "dm_conversation_id", dm_conversation_id
# except Exception as e:
# yield "error", handle_tweepy_exception(e)

View File

@ -0,0 +1,470 @@
# from typing import cast
import tweepy
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
# from backend.blocks.twitter._builders import UserExpansionsBuilder
# from backend.blocks.twitter._types import TweetFields, TweetUserFields, UserExpansionInputs, UserExpansions
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
# from tweepy.client import Response
class TwitterUnfollowListBlock(Block):
"""
Unfollows a Twitter list for the authenticated user
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["follows.write", "offline.access"]
)
list_id: str = SchemaField(
description="The ID of the List to unfollow",
placeholder="Enter list ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the unfollow was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="1f43310a-a62f-11ef-8276-2b06a1bbae1a",
description="This block unfollows a specified Twitter list for the authenticated user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterUnfollowListBlock.Input,
output_schema=TwitterUnfollowListBlock.Output,
test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"unfollow_list": lambda *args, **kwargs: True},
)
@staticmethod
def unfollow_list(credentials: TwitterCredentials, list_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.unfollow_list(list_id=list_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.unfollow_list(credentials, input_data.list_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterFollowListBlock(Block):
"""
Follows a Twitter list for the authenticated user
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "list.write", "offline.access"]
)
list_id: str = SchemaField(
description="The ID of the List to follow",
placeholder="Enter list ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the follow was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="03d8acf6-a62f-11ef-b17f-b72b04a09e79",
description="This block follows a specified Twitter list for the authenticated user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterFollowListBlock.Input,
output_schema=TwitterFollowListBlock.Output,
test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"follow_list": lambda *args, **kwargs: True},
)
@staticmethod
def follow_list(credentials: TwitterCredentials, list_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.follow_list(list_id=list_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.follow_list(credentials, input_data.list_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
# Enterprise level [manual testing required]; there is a high chance these calls will return an error
# Needs new type support for the inputs
# class TwitterListGetFollowersBlock(Block):
# """
# Gets followers of a specified Twitter list
# """
# class Input(UserExpansionInputs):
# credentials: TwitterCredentialsInput = TwitterCredentialsField(
# ["tweet.read","users.read", "list.read", "offline.access"]
# )
# list_id: str = SchemaField(
# description="The ID of the List to get followers for",
# placeholder="Enter list ID",
# required=True
# )
# max_results: int = SchemaField(
# description="Max number of results per page (1-100)",
# placeholder="Enter max results",
# default=10,
# advanced=True,
# )
# pagination_token: str = SchemaField(
# description="Token for pagination",
# placeholder="Enter pagination token",
# default="",
# advanced=True,
# )
# class Output(BlockSchema):
# user_ids: list[str] = SchemaField(description="List of user IDs of followers")
# usernames: list[str] = SchemaField(description="List of usernames of followers")
# next_token: str = SchemaField(description="Token for next page of results")
# data: list[dict] = SchemaField(description="Complete follower data")
# included: dict = SchemaField(description="Additional data requested via expansions")
# meta: dict = SchemaField(description="Metadata about the response")
# error: str = SchemaField(description="Error message if the request failed")
# def __init__(self):
# super().__init__(
# id="16b289b4-a62f-11ef-95d4-bb29b849eb99",
# description="This block retrieves followers of a specified Twitter list.",
# categories={BlockCategory.SOCIAL},
# input_schema=TwitterListGetFollowersBlock.Input,
# output_schema=TwitterListGetFollowersBlock.Output,
# test_input={
# "list_id": "123456789",
# "max_results": 10,
# "pagination_token": None,
# "credentials": TEST_CREDENTIALS_INPUT,
# "expansions": [],
# "tweet_fields": [],
# "user_fields": []
# },
# test_credentials=TEST_CREDENTIALS,
# test_output=[
# ("user_ids", ["2244994945"]),
# ("usernames", ["testuser"]),
# ("next_token", None),
# ("data", {"followers": [{"id": "2244994945", "username": "testuser"}]}),
# ("included", {}),
# ("meta", {}),
# ("error", "")
# ],
# test_mock={
# "get_list_followers": lambda *args, **kwargs: ({
# "followers": [{"id": "2244994945", "username": "testuser"}]
# }, {}, {}, ["2244994945"], ["testuser"], None)
# }
# )
# @staticmethod
# def get_list_followers(
# credentials: TwitterCredentials,
# list_id: str,
# max_results: int,
# pagination_token: str,
# expansions: list[UserExpansions],
# tweet_fields: list[TweetFields],
# user_fields: list[TweetUserFields]
# ):
# try:
# client = tweepy.Client(
# bearer_token=credentials.access_token.get_secret_value(),
# )
# params = {
# "id": list_id,
# "max_results": max_results,
# "pagination_token": None if pagination_token == "" else pagination_token,
# "user_auth": False
# }
# params = (UserExpansionsBuilder(params)
# .add_expansions(expansions)
# .add_tweet_fields(tweet_fields)
# .add_user_fields(user_fields)
# .build())
# response = cast(
# Response,
# client.get_list_followers(**params)
# )
# meta = {}
# user_ids = []
# usernames = []
# next_token = None
# if response.meta:
# meta = response.meta
# next_token = meta.get("next_token")
# included = IncludesSerializer.serialize(response.includes)
# data = ResponseDataSerializer.serialize_list(response.data)
# if response.data:
# user_ids = [str(item.id) for item in response.data]
# usernames = [item.username for item in response.data]
# return data, included, meta, user_ids, usernames, next_token
# raise Exception("No followers found")
# except tweepy.TweepyException:
# raise
# def run(
# self,
# input_data: Input,
# *,
# credentials: TwitterCredentials,
# **kwargs,
# ) -> BlockOutput:
# try:
# followers_data, included, meta, user_ids, usernames, next_token = self.get_list_followers(
# credentials,
# input_data.list_id,
# input_data.max_results,
# input_data.pagination_token,
# input_data.expansions,
# input_data.tweet_fields,
# input_data.user_fields
# )
# if user_ids:
# yield "user_ids", user_ids
# if usernames:
# yield "usernames", usernames
# if next_token:
# yield "next_token", next_token
# if followers_data:
# yield "data", followers_data
# if included:
# yield "included", included
# if meta:
# yield "meta", meta
# except Exception as e:
# yield "error", handle_tweepy_exception(e)
# class TwitterGetFollowedListsBlock(Block):
# """
# Gets lists followed by a specified Twitter user
# """
# class Input(UserExpansionInputs):
# credentials: TwitterCredentialsInput = TwitterCredentialsField(
# ["follows.read", "users.read", "list.read", "offline.access"]
# )
# user_id: str = SchemaField(
# description="The user ID whose followed Lists to retrieve",
# placeholder="Enter user ID",
# required=True
# )
# max_results: int = SchemaField(
# description="Max number of results per page (1-100)",
# placeholder="Enter max results",
# default=10,
# advanced=True,
# )
# pagination_token: str = SchemaField(
# description="Token for pagination",
# placeholder="Enter pagination token",
# default="",
# advanced=True,
# )
# class Output(BlockSchema):
# list_ids: list[str] = SchemaField(description="List of list IDs")
# list_names: list[str] = SchemaField(description="List of list names")
# data: list[dict] = SchemaField(description="Complete list data")
# includes: dict = SchemaField(description="Additional data requested via expansions")
# meta: dict = SchemaField(description="Metadata about the response")
# next_token: str = SchemaField(description="Token for next page of results")
# error: str = SchemaField(description="Error message if the request failed")
# def __init__(self):
# super().__init__(
# id="0e18bbfc-a62f-11ef-94fa-1f1e174b809e",
# description="This block retrieves all Lists a specified user follows.",
# categories={BlockCategory.SOCIAL},
# input_schema=TwitterGetFollowedListsBlock.Input,
# output_schema=TwitterGetFollowedListsBlock.Output,
# test_input={
# "user_id": "123456789",
# "max_results": 10,
# "pagination_token": None,
# "credentials": TEST_CREDENTIALS_INPUT,
# "expansions": [],
# "tweet_fields": [],
# "user_fields": []
# },
# test_credentials=TEST_CREDENTIALS,
# test_output=[
# ("list_ids", ["12345"]),
# ("list_names", ["Test List"]),
# ("data", {"followed_lists": [{"id": "12345", "name": "Test List"}]}),
# ("includes", {}),
# ("meta", {}),
# ("next_token", None),
# ("error", "")
# ],
# test_mock={
# "get_followed_lists": lambda *args, **kwargs: ({
# "followed_lists": [{"id": "12345", "name": "Test List"}]
# }, {}, {}, ["12345"], ["Test List"], None)
# }
# )
# @staticmethod
# def get_followed_lists(
# credentials: TwitterCredentials,
# user_id: str,
# max_results: int,
# pagination_token: str,
# expansions: list[UserExpansions],
# tweet_fields: list[TweetFields],
# user_fields: list[TweetUserFields]
# ):
# try:
# client = tweepy.Client(
# bearer_token=credentials.access_token.get_secret_value(),
# )
# params = {
# "id": user_id,
# "max_results": max_results,
# "pagination_token": None if pagination_token == "" else pagination_token,
# "user_auth": False
# }
# params = (UserExpansionsBuilder(params)
# .add_expansions(expansions)
# .add_tweet_fields(tweet_fields)
# .add_user_fields(user_fields)
# .build())
# response = cast(
# Response,
# client.get_followed_lists(**params)
# )
# meta = {}
# list_ids = []
# list_names = []
# next_token = None
# if response.meta:
# meta = response.meta
# next_token = meta.get("next_token")
# included = IncludesSerializer.serialize(response.includes)
# data = ResponseDataSerializer.serialize_list(response.data)
# if response.data:
# list_ids = [str(item.id) for item in response.data]
# list_names = [item.name for item in response.data]
# return data, included, meta, list_ids, list_names, next_token
# raise Exception("No followed lists found")
# except tweepy.TweepyException:
# raise
# def run(
# self,
# input_data: Input,
# *,
# credentials: TwitterCredentials,
# **kwargs,
# ) -> BlockOutput:
# try:
# lists_data, included, meta, list_ids, list_names, next_token = self.get_followed_lists(
# credentials,
# input_data.user_id,
# input_data.max_results,
# input_data.pagination_token,
# input_data.expansions,
# input_data.tweet_fields,
# input_data.user_fields
# )
# if list_ids:
# yield "list_ids", list_ids
# if list_names:
# yield "list_names", list_names
# if next_token:
# yield "next_token", next_token
# if lists_data:
# yield "data", lists_data
# if included:
# yield "includes", included
# if meta:
# yield "meta", meta
# except Exception as e:
# yield "error", handle_tweepy_exception(e)

View File

@ -0,0 +1,348 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import ListExpansionsBuilder
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
ListExpansionInputs,
ListExpansionsFilter,
ListFieldsFilter,
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterGetListBlock(Block):
"""
Gets information about a Twitter List specified by ID
"""
class Input(ListExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "offline.access"]
)
list_id: str = SchemaField(
description="The ID of the List to lookup",
placeholder="Enter list ID",
required=True,
)
class Output(BlockSchema):
# Common outputs
id: str = SchemaField(description="ID of the Twitter List")
name: str = SchemaField(description="Name of the Twitter List")
owner_id: str = SchemaField(description="ID of the List owner")
owner_username: str = SchemaField(description="Username of the List owner")
# Complete outputs
data: dict = SchemaField(description="Complete list data")
included: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(description="Metadata about the response")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="34ebc80a-a62f-11ef-9c2a-3fcab6c07079",
description="This block retrieves information about a specified Twitter List.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetListBlock.Input,
output_schema=TwitterGetListBlock.Output,
test_input={
"list_id": "84839422",
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"list_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("id", "84839422"),
("name", "Official Twitter Accounts"),
("owner_id", "2244994945"),
("owner_username", "TwitterAPI"),
("data", {"id": "84839422", "name": "Official Twitter Accounts"}),
],
test_mock={
"get_list": lambda *args, **kwargs: (
{"id": "84839422", "name": "Official Twitter Accounts"},
{},
{},
"2244994945",
"TwitterAPI",
)
},
)
@staticmethod
def get_list(
credentials: TwitterCredentials,
list_id: str,
expansions: ListExpansionsFilter | None,
user_fields: TweetUserFieldsFilter | None,
list_fields: ListFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {"id": list_id, "user_auth": False}
params = (
ListExpansionsBuilder(params)
.add_expansions(expansions)
.add_user_fields(user_fields)
.add_list_fields(list_fields)
.build()
)
response = cast(Response, client.get_list(**params))
meta = {}
owner_id = ""
owner_username = ""
included = {}
if response.includes:
included = IncludesSerializer.serialize(response.includes)
if "users" in included:
owner_id = str(included["users"][0]["id"])
owner_username = included["users"][0]["username"]
if response.meta:
meta = response.meta
if response.data:
data_dict = ResponseDataSerializer.serialize_dict(response.data)
return data_dict, included, meta, owner_id, owner_username
raise Exception("List not found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
list_data, included, meta, owner_id, owner_username = self.get_list(
credentials,
input_data.list_id,
input_data.expansions,
input_data.user_fields,
input_data.list_fields,
)
yield "id", str(list_data["id"])
yield "name", list_data["name"]
if owner_id:
yield "owner_id", owner_id
if owner_username:
yield "owner_username", owner_username
yield "data", {"id": list_data["id"], "name": list_data["name"]}
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetOwnedListsBlock(Block):
"""
Gets all Lists owned by the specified user
"""
class Input(ListExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "list.read", "offline.access"]
)
user_id: str = SchemaField(
description="The user ID whose owned Lists to retrieve",
placeholder="Enter user ID",
required=True,
)
max_results: int | None = SchemaField(
description="Maximum number of results per page (1-100)",
placeholder="Enter max results (default 100)",
advanced=True,
default=10,
)
pagination_token: str | None = SchemaField(
description="Token for pagination",
placeholder="Enter pagination token",
advanced=True,
default="",
)
class Output(BlockSchema):
# Common outputs
list_ids: list[str] = SchemaField(description="List ids of the owned lists")
list_names: list[str] = SchemaField(description="List names of the owned lists")
next_token: str = SchemaField(description="Token for next page of results")
# Complete outputs
data: list[dict] = SchemaField(description="Complete owned lists data")
included: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(description="Metadata about the response")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="2b6bdb26-a62f-11ef-a9ce-ff89c2568726",
description="This block retrieves all Lists owned by a specified Twitter user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetOwnedListsBlock.Input,
output_schema=TwitterGetOwnedListsBlock.Output,
test_input={
"user_id": "2244994945",
"max_results": 10,
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"list_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("list_ids", ["84839422"]),
("list_names", ["Official Twitter Accounts"]),
("data", [{"id": "84839422", "name": "Official Twitter Accounts"}]),
],
test_mock={
"get_owned_lists": lambda *args, **kwargs: (
[{"id": "84839422", "name": "Official Twitter Accounts"}],
{},
{},
["84839422"],
["Official Twitter Accounts"],
None,
)
},
)
@staticmethod
def get_owned_lists(
credentials: TwitterCredentials,
user_id: str,
max_results: int | None,
pagination_token: str | None,
expansions: ListExpansionsFilter | None,
user_fields: TweetUserFieldsFilter | None,
list_fields: ListFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": user_id,
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"user_auth": False,
}
params = (
ListExpansionsBuilder(params)
.add_expansions(expansions)
.add_user_fields(user_fields)
.add_list_fields(list_fields)
.build()
)
response = cast(Response, client.get_owned_lists(**params))
meta = {}
included = {}
list_ids = []
list_names = []
next_token = None
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
if response.includes:
included = IncludesSerializer.serialize(response.includes)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
list_ids = [
str(item.id) for item in response.data if hasattr(item, "id")
]
list_names = [
item.name for item in response.data if hasattr(item, "name")
]
return data, included, meta, list_ids, list_names, next_token
raise Exception("User have no owned list")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
list_data, included, meta, list_ids, list_names, next_token = (
self.get_owned_lists(
credentials,
input_data.user_id,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.user_fields,
input_data.list_fields,
)
)
if list_ids:
yield "list_ids", list_ids
if list_names:
yield "list_names", list_names
if next_token:
yield "next_token", next_token
if list_data:
yield "data", list_data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,527 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import (
ListExpansionsBuilder,
UserExpansionsBuilder,
)
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
ListExpansionInputs,
ListExpansionsFilter,
ListFieldsFilter,
TweetFieldsFilter,
TweetUserFieldsFilter,
UserExpansionInputs,
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterRemoveListMemberBlock(Block):
"""
Removes a member from a Twitter List that the authenticated user owns
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["list.write", "users.read", "tweet.read", "offline.access"]
)
list_id: str = SchemaField(
description="The ID of the List to remove the member from",
placeholder="Enter list ID",
required=True,
)
user_id: str = SchemaField(
description="The ID of the user to remove from the List",
placeholder="Enter user ID to remove",
required=True,
)
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the member was successfully removed"
)
error: str = SchemaField(description="Error message if the removal failed")
def __init__(self):
super().__init__(
id="5a3d1320-a62f-11ef-b7ce-a79e7656bcb0",
description="This block removes a specified user from a Twitter List owned by the authenticated user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterRemoveListMemberBlock.Input,
output_schema=TwitterRemoveListMemberBlock.Output,
test_input={
"list_id": "123456789",
"user_id": "987654321",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"remove_list_member": lambda *args, **kwargs: True},
)
@staticmethod
def remove_list_member(credentials: TwitterCredentials, list_id: str, user_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.remove_list_member(id=list_id, user_id=user_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
except Exception:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.remove_list_member(
credentials, input_data.list_id, input_data.user_id
)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterAddListMemberBlock(Block):
"""
Adds a member to a Twitter List that the authenticated user owns
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["list.write", "users.read", "tweet.read", "offline.access"]
)
list_id: str = SchemaField(
description="The ID of the List to add the member to",
placeholder="Enter list ID",
required=True,
)
user_id: str = SchemaField(
description="The ID of the user to add to the List",
placeholder="Enter user ID to add",
required=True,
)
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the member was successfully added"
)
error: str = SchemaField(description="Error message if the addition failed")
def __init__(self):
super().__init__(
id="3ee8284e-a62f-11ef-84e4-8f6e2cbf0ddb",
description="This block adds a specified user to a Twitter List owned by the authenticated user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterAddListMemberBlock.Input,
output_schema=TwitterAddListMemberBlock.Output,
test_input={
"list_id": "123456789",
"user_id": "987654321",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"add_list_member": lambda *args, **kwargs: True},
)
@staticmethod
def add_list_member(credentials: TwitterCredentials, list_id: str, user_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.add_list_member(id=list_id, user_id=user_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
except Exception:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.add_list_member(
credentials, input_data.list_id, input_data.user_id
)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetListMembersBlock(Block):
"""
Gets the members of a specified Twitter List
"""
class Input(UserExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["list.read", "offline.access"]
)
list_id: str = SchemaField(
description="The ID of the List to get members from",
placeholder="Enter list ID",
required=True,
)
max_results: int | None = SchemaField(
description="Maximum number of results per page (1-100)",
placeholder="Enter max results",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for pagination of results",
placeholder="Enter pagination token",
default="",
advanced=True,
)
class Output(BlockSchema):
ids: list[str] = SchemaField(description="List of member user IDs")
usernames: list[str] = SchemaField(description="List of member usernames")
next_token: str = SchemaField(description="Next token for pagination")
data: list[dict] = SchemaField(
description="Complete user data for list members"
)
included: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(description="Metadata including pagination info")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="4dba046e-a62f-11ef-b69a-87240c84b4c7",
description="This block retrieves the members of a specified Twitter List.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetListMembersBlock.Input,
output_schema=TwitterGetListMembersBlock.Output,
test_input={
"list_id": "123456789",
"max_results": 2,
"pagination_token": None,
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["12345", "67890"]),
("usernames", ["testuser1", "testuser2"]),
(
"data",
[
{"id": "12345", "username": "testuser1"},
{"id": "67890", "username": "testuser2"},
],
),
],
test_mock={
"get_list_members": lambda *args, **kwargs: (
["12345", "67890"],
["testuser1", "testuser2"],
[
{"id": "12345", "username": "testuser1"},
{"id": "67890", "username": "testuser2"},
],
{},
{},
None,
)
},
)
@staticmethod
def get_list_members(
credentials: TwitterCredentials,
list_id: str,
max_results: int | None,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": list_id,
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"user_auth": False,
}
params = (
UserExpansionsBuilder(params)
.add_expansions(expansions)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_list_members(**params))
meta = {}
included = {}
next_token = None
user_ids = []
usernames = []
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
if response.includes:
included = IncludesSerializer.serialize(response.includes)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
user_ids = [str(user.id) for user in response.data]
usernames = [user.username for user in response.data]
return user_ids, usernames, data, included, meta, next_token
raise Exception("List members not found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, usernames, data, included, meta, next_token = self.get_list_members(
credentials,
input_data.list_id,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.tweet_fields,
input_data.user_fields,
)
if ids:
yield "ids", ids
if usernames:
yield "usernames", usernames
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetListMembershipsBlock(Block):
"""
Gets all Lists that a specified user is a member of
"""
class Input(ListExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["list.read", "offline.access"]
)
user_id: str = SchemaField(
description="The ID of the user whose List memberships to retrieve",
placeholder="Enter user ID",
required=True,
)
max_results: int | None = SchemaField(
description="Maximum number of results per page (1-100)",
placeholder="Enter max results",
advanced=True,
default=10,
)
pagination_token: str | None = SchemaField(
description="Token for pagination of results",
placeholder="Enter pagination token",
advanced=True,
default="",
)
class Output(BlockSchema):
list_ids: list[str] = SchemaField(description="List of list IDs")
next_token: str = SchemaField(description="Next token for pagination")
data: list[dict] = SchemaField(description="List membership data")
included: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(description="Metadata about pagination")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="46e6429c-a62f-11ef-81c0-2b55bc7823ba",
description="This block retrieves all Lists that a specified user is a member of.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetListMembershipsBlock.Input,
output_schema=TwitterGetListMembershipsBlock.Output,
test_input={
"user_id": "123456789",
"max_results": 1,
"pagination_token": None,
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"list_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("list_ids", ["84839422"]),
("data", [{"id": "84839422"}]),
],
test_mock={
"get_list_memberships": lambda *args, **kwargs: (
[{"id": "84839422"}],
{},
{},
["84839422"],
None,
)
},
)
@staticmethod
def get_list_memberships(
credentials: TwitterCredentials,
user_id: str,
max_results: int | None,
pagination_token: str | None,
expansions: ListExpansionsFilter | None,
user_fields: TweetUserFieldsFilter | None,
list_fields: ListFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": user_id,
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"user_auth": False,
}
params = (
ListExpansionsBuilder(params)
.add_expansions(expansions)
.add_user_fields(user_fields)
.add_list_fields(list_fields)
.build()
)
response = cast(Response, client.get_list_memberships(**params))
meta = {}
included = {}
next_token = None
list_ids = []
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
if response.includes:
included = IncludesSerializer.serialize(response.includes)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
list_ids = [str(lst.id) for lst in response.data]
return data, included, meta, list_ids, next_token
raise Exception("List memberships not found")
except tweepy.TweepyException:
raise
except Exception:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
data, included, meta, list_ids, next_token = self.get_list_memberships(
credentials,
input_data.user_id,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.user_fields,
input_data.list_fields,
)
if list_ids:
yield "list_ids", list_ids
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,217 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import TweetExpansionsBuilder
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
ExpansionFilter,
TweetExpansionInputs,
TweetFieldsFilter,
TweetMediaFieldsFilter,
TweetPlaceFieldsFilter,
TweetPollFieldsFilter,
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterGetListTweetsBlock(Block):
"""
Gets tweets from a specified Twitter list
"""
class Input(TweetExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "offline.access"]
)
list_id: str = SchemaField(
description="The ID of the List whose Tweets you would like to retrieve",
placeholder="Enter list ID",
required=True,
)
max_results: int | None = SchemaField(
description="Maximum number of results per page (1-100)",
placeholder="Enter max results",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for paginating through results",
placeholder="Enter pagination token",
default="",
advanced=True,
)
class Output(BlockSchema):
# Common outputs
tweet_ids: list[str] = SchemaField(description="List of tweet IDs")
texts: list[str] = SchemaField(description="List of tweet texts")
next_token: str = SchemaField(description="Token for next page of results")
# Complete outputs
data: list[dict] = SchemaField(description="Complete list tweets data")
included: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(
description="Response metadata including pagination tokens"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="6657edb0-a62f-11ef-8c10-0326d832467d",
description="This block retrieves tweets from a specified Twitter list.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetListTweetsBlock.Input,
output_schema=TwitterGetListTweetsBlock.Output,
test_input={
"list_id": "84839422",
"max_results": 1,
"pagination_token": None,
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"media_fields": None,
"place_fields": None,
"poll_fields": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("tweet_ids", ["1234567890"]),
("texts", ["Test tweet"]),
("data", [{"id": "1234567890", "text": "Test tweet"}]),
],
test_mock={
"get_list_tweets": lambda *args, **kwargs: (
[{"id": "1234567890", "text": "Test tweet"}],
{},
{},
["1234567890"],
["Test tweet"],
None,
)
},
)
@staticmethod
def get_list_tweets(
credentials: TwitterCredentials,
list_id: str,
max_results: int | None,
pagination_token: str | None,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
place_fields: TweetPlaceFieldsFilter | None,
poll_fields: TweetPollFieldsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": list_id,
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"user_auth": False,
}
params = (
TweetExpansionsBuilder(params)
.add_expansions(expansions)
.add_media_fields(media_fields)
.add_place_fields(place_fields)
.add_poll_fields(poll_fields)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_list_tweets(**params))
meta = {}
included = {}
tweet_ids = []
texts = []
next_token = None
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
if response.includes:
included = IncludesSerializer.serialize(response.includes)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
tweet_ids = [str(item.id) for item in response.data]
texts = [item.text for item in response.data]
return data, included, meta, tweet_ids, texts, next_token
raise Exception("No tweets found in this list")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
list_data, included, meta, tweet_ids, texts, next_token = (
self.get_list_tweets(
credentials,
input_data.list_id,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.media_fields,
input_data.place_fields,
input_data.poll_fields,
input_data.tweet_fields,
input_data.user_fields,
)
)
if tweet_ids:
yield "tweet_ids", tweet_ids
if texts:
yield "texts", texts
if next_token:
yield "next_token", next_token
if list_data:
yield "data", list_data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,278 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterDeleteListBlock(Block):
"""
Deletes a Twitter List owned by the authenticated user
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["list.write", "offline.access"]
)
list_id: str = SchemaField(
description="The ID of the List to be deleted",
placeholder="Enter list ID",
required=True,
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the deletion was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="843c6892-a62f-11ef-a5c8-b71239a78d3b",
description="This block deletes a specified Twitter List owned by the authenticated user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterDeleteListBlock.Input,
output_schema=TwitterDeleteListBlock.Output,
test_input={"list_id": "1234567890", "credentials": TEST_CREDENTIALS_INPUT},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"delete_list": lambda *args, **kwargs: True},
)
@staticmethod
def delete_list(credentials: TwitterCredentials, list_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.delete_list(id=list_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
except Exception:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.delete_list(credentials, input_data.list_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterUpdateListBlock(Block):
"""
Updates a Twitter List owned by the authenticated user
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["list.write", "offline.access"]
)
list_id: str = SchemaField(
description="The ID of the List to be updated",
placeholder="Enter list ID",
advanced=False,
)
name: str | None = SchemaField(
description="New name for the List",
placeholder="Enter list name",
default="",
advanced=False,
)
description: str | None = SchemaField(
description="New description for the List",
placeholder="Enter list description",
default="",
advanced=False,
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the update was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="7d12630a-a62f-11ef-90c9-8f5a996612c3",
description="This block updates a specified Twitter List owned by the authenticated user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterUpdateListBlock.Input,
output_schema=TwitterUpdateListBlock.Output,
test_input={
"list_id": "1234567890",
"name": "Updated List Name",
"description": "Updated List Description",
"private": True,
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"update_list": lambda *args, **kwargs: True},
)
@staticmethod
def update_list(
credentials: TwitterCredentials,
list_id: str,
name: str | None,
description: str | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.update_list(
id=list_id,
name=None if name == "" else name,
description=None if description == "" else description,
user_auth=False,
)
return True
except tweepy.TweepyException:
raise
except Exception:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.update_list(
credentials, input_data.list_id, input_data.name, input_data.description
)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterCreateListBlock(Block):
"""
Creates a Twitter List owned by the authenticated user
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["list.write", "offline.access"]
)
name: str = SchemaField(
description="The name of the List to be created",
placeholder="Enter list name",
advanced=False,
default="",
)
description: str | None = SchemaField(
description="Description of the List",
placeholder="Enter list description",
advanced=False,
default="",
)
private: bool = SchemaField(
description="Whether the List should be private",
advanced=False,
default=False,
)
class Output(BlockSchema):
url: str = SchemaField(description="URL of the created list")
list_id: str = SchemaField(description="ID of the created list")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="724148ba-a62f-11ef-89ba-5349b813ef5f",
description="This block creates a new Twitter List for the authenticated user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterCreateListBlock.Input,
output_schema=TwitterCreateListBlock.Output,
test_input={
"name": "New List Name",
"description": "New List Description",
"private": True,
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("list_id", "1234567890"),
("url", "https://twitter.com/i/lists/1234567890"),
],
test_mock={"create_list": lambda *args, **kwargs: ("1234567890")},
)
@staticmethod
def create_list(
credentials: TwitterCredentials,
name: str,
description: str | None,
private: bool,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
response = cast(
Response,
client.create_list(
name=None if name == "" else name,
description=None if description == "" else description,
private=private,
user_auth=False,
),
)
list_id = str(response.data["id"])
return list_id
except tweepy.TweepyException:
raise
except Exception:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
list_id = self.create_list(
credentials, input_data.name, input_data.description, input_data.private
)
yield "list_id", list_id
yield "url", f"https://twitter.com/i/lists/{list_id}"
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,285 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import ListExpansionsBuilder
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
ListExpansionInputs,
ListExpansionsFilter,
ListFieldsFilter,
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterUnpinListBlock(Block):
"""
Enables the authenticated user to unpin a List.
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["list.write", "users.read", "tweet.read", "offline.access"]
)
list_id: str = SchemaField(
description="The ID of the List to unpin",
placeholder="Enter list ID",
required=True,
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the unpin was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="a099c034-a62f-11ef-9622-47d0ceb73555",
description="This block allows the authenticated user to unpin a specified List.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterUnpinListBlock.Input,
output_schema=TwitterUnpinListBlock.Output,
test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"unpin_list": lambda *args, **kwargs: True},
)
@staticmethod
def unpin_list(credentials: TwitterCredentials, list_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.unpin_list(list_id=list_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
except Exception:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.unpin_list(credentials, input_data.list_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterPinListBlock(Block):
"""
Enables the authenticated user to pin a List.
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["list.write", "users.read", "tweet.read", "offline.access"]
)
list_id: str = SchemaField(
description="The ID of the List to pin",
placeholder="Enter list ID",
required=True,
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the pin was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="8ec16e48-a62f-11ef-9f35-f3d6de43a802",
description="This block allows the authenticated user to pin a specified List.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterPinListBlock.Input,
output_schema=TwitterPinListBlock.Output,
test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"pin_list": lambda *args, **kwargs: True},
)
@staticmethod
def pin_list(credentials: TwitterCredentials, list_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.pin_list(list_id=list_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
except Exception:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.pin_list(credentials, input_data.list_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetPinnedListsBlock(Block):
"""
Returns the Lists pinned by the authenticated user.
"""
class Input(ListExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["lists.read", "users.read", "offline.access"]
)
class Output(BlockSchema):
list_ids: list[str] = SchemaField(description="List IDs of the pinned lists")
list_names: list[str] = SchemaField(
description="List names of the pinned lists"
)
data: list[dict] = SchemaField(
description="Response data containing pinned lists"
)
included: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(description="Metadata about the response")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="97e03aae-a62f-11ef-bc53-5b89cb02888f",
description="This block returns the Lists pinned by the authenticated user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetPinnedListsBlock.Input,
output_schema=TwitterGetPinnedListsBlock.Output,
test_input={
"expansions": None,
"list_fields": None,
"user_fields": None,
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("list_ids", ["84839422"]),
("list_names", ["Twitter List"]),
("data", [{"id": "84839422", "name": "Twitter List"}]),
],
test_mock={
"get_pinned_lists": lambda *args, **kwargs: (
[{"id": "84839422", "name": "Twitter List"}],
{},
{},
["84839422"],
["Twitter List"],
)
},
)
@staticmethod
def get_pinned_lists(
credentials: TwitterCredentials,
expansions: ListExpansionsFilter | None,
user_fields: TweetUserFieldsFilter | None,
list_fields: ListFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {"user_auth": False}
params = (
ListExpansionsBuilder(params)
.add_expansions(expansions)
.add_user_fields(user_fields)
.add_list_fields(list_fields)
.build()
)
response = cast(Response, client.get_pinned_lists(**params))
meta = {}
included = {}
list_ids = []
list_names = []
if response.meta:
meta = response.meta
if response.includes:
included = IncludesSerializer.serialize(response.includes)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
list_ids = [str(item.id) for item in response.data]
list_names = [item.name for item in response.data]
return data, included, meta, list_ids, list_names
raise Exception("Lists not found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
list_data, included, meta, list_ids, list_names = self.get_pinned_lists(
credentials,
input_data.expansions,
input_data.user_fields,
input_data.list_fields,
)
if list_ids:
yield "list_ids", list_ids
if list_names:
yield "list_names", list_names
if list_data:
yield "data", list_data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,195 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import SpaceExpansionsBuilder
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
SpaceExpansionInputs,
SpaceExpansionsFilter,
SpaceFieldsFilter,
SpaceStatesFilter,
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterSearchSpacesBlock(Block):
"""
    Returns live or scheduled Spaces matching the specified search terms (results are limited to a one-week window)
"""
class Input(SpaceExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["spaces.read", "users.read", "tweet.read", "offline.access"]
)
query: str = SchemaField(
description="Search term to find in Space titles",
placeholder="Enter search query",
)
max_results: int | None = SchemaField(
description="Maximum number of results to return (1-100)",
placeholder="Enter max results",
default=10,
advanced=True,
)
state: SpaceStatesFilter = SchemaField(
description="Type of Spaces to return (live, scheduled, or all)",
placeholder="Enter state filter",
default=SpaceStatesFilter.all,
)
class Output(BlockSchema):
# Common outputs that user commonly uses
ids: list[str] = SchemaField(description="List of space IDs")
titles: list[str] = SchemaField(description="List of space titles")
host_ids: list = SchemaField(description="List of host IDs")
next_token: str = SchemaField(description="Next token for pagination")
# Complete outputs for advanced use
data: list[dict] = SchemaField(description="Complete space data")
includes: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(description="Metadata including pagination info")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="aaefdd48-a62f-11ef-a73c-3f44df63e276",
description="This block searches for Twitter Spaces based on specified terms.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterSearchSpacesBlock.Input,
output_schema=TwitterSearchSpacesBlock.Output,
test_input={
"query": "tech",
"max_results": 1,
"state": "live",
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"space_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["1234"]),
("titles", ["Tech Talk"]),
("host_ids", ["5678"]),
("data", [{"id": "1234", "title": "Tech Talk", "host_ids": ["5678"]}]),
],
test_mock={
"search_spaces": lambda *args, **kwargs: (
[{"id": "1234", "title": "Tech Talk", "host_ids": ["5678"]}],
{},
{},
["1234"],
["Tech Talk"],
["5678"],
None,
)
},
)
@staticmethod
def search_spaces(
credentials: TwitterCredentials,
query: str,
max_results: int | None,
state: SpaceStatesFilter,
expansions: SpaceExpansionsFilter | None,
space_fields: SpaceFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {"query": query, "max_results": max_results, "state": state.value}
params = (
SpaceExpansionsBuilder(params)
.add_expansions(expansions)
.add_space_fields(space_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.search_spaces(**params))
meta = {}
next_token = ""
if response.meta:
meta = response.meta
if "next_token" in meta:
next_token = meta["next_token"]
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
ids = [str(space["id"]) for space in response.data if "id" in space]
titles = [space["title"] for space in data if "title" in space]
host_ids = [space["host_ids"] for space in data if "host_ids" in space]
return data, included, meta, ids, titles, host_ids, next_token
raise Exception("Spaces not found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
data, included, meta, ids, titles, host_ids, next_token = (
self.search_spaces(
credentials,
input_data.query,
input_data.max_results,
input_data.state,
input_data.expansions,
input_data.space_fields,
input_data.user_fields,
)
)
if ids:
yield "ids", ids
if titles:
yield "titles", titles
if host_ids:
yield "host_ids", host_ids
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if included:
yield "includes", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,651 @@
from typing import Literal, Union, cast
import tweepy
from pydantic import BaseModel
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import (
SpaceExpansionsBuilder,
TweetExpansionsBuilder,
UserExpansionsBuilder,
)
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
ExpansionFilter,
SpaceExpansionInputs,
SpaceExpansionsFilter,
SpaceFieldsFilter,
TweetExpansionInputs,
TweetFieldsFilter,
TweetMediaFieldsFilter,
TweetPlaceFieldsFilter,
TweetPollFieldsFilter,
TweetUserFieldsFilter,
UserExpansionInputs,
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class SpaceList(BaseModel):
discriminator: Literal["space_list"]
space_ids: list[str] = SchemaField(
description="List of Space IDs to lookup (up to 100)",
placeholder="Enter Space IDs",
default=[],
advanced=False,
)
class UserList(BaseModel):
discriminator: Literal["user_list"]
user_ids: list[str] = SchemaField(
description="List of user IDs to lookup their Spaces (up to 100)",
placeholder="Enter user IDs",
default=[],
advanced=False,
)
class TwitterGetSpacesBlock(Block):
"""
Gets information about multiple Twitter Spaces specified by Space IDs or creator user IDs
"""
class Input(SpaceExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["spaces.read", "users.read", "offline.access"]
)
identifier: Union[SpaceList, UserList] = SchemaField(
discriminator="discriminator",
description="Choose whether to lookup spaces by their IDs or by creator user IDs",
advanced=False,
)
class Output(BlockSchema):
# Common outputs
ids: list[str] = SchemaField(description="List of space IDs")
titles: list[str] = SchemaField(description="List of space titles")
# Complete outputs for advanced use
data: list[dict] = SchemaField(description="Complete space data")
includes: dict = SchemaField(
description="Additional data requested via expansions"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="d75bd7d8-a62f-11ef-b0d8-c7a9496f617f",
description="This block retrieves information about multiple Twitter Spaces.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetSpacesBlock.Input,
output_schema=TwitterGetSpacesBlock.Output,
test_input={
"identifier": {
"discriminator": "space_list",
"space_ids": ["1DXxyRYNejbKM"],
},
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"space_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["1DXxyRYNejbKM"]),
("titles", ["Test Space"]),
(
"data",
[
{
"id": "1DXxyRYNejbKM",
"title": "Test Space",
"host_id": "1234567",
}
],
),
],
test_mock={
"get_spaces": lambda *args, **kwargs: (
[
{
"id": "1DXxyRYNejbKM",
"title": "Test Space",
"host_id": "1234567",
}
],
{},
["1DXxyRYNejbKM"],
["Test Space"],
)
},
)
@staticmethod
def get_spaces(
credentials: TwitterCredentials,
identifier: Union[SpaceList, UserList],
expansions: SpaceExpansionsFilter | None,
space_fields: SpaceFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"ids": (
identifier.space_ids if isinstance(identifier, SpaceList) else None
),
"user_ids": (
identifier.user_ids if isinstance(identifier, UserList) else None
),
}
params = (
SpaceExpansionsBuilder(params)
.add_expansions(expansions)
.add_space_fields(space_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_spaces(**params))
ids = []
titles = []
included = IncludesSerializer.serialize(response.includes)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
ids = [space["id"] for space in data if "id" in space]
titles = [space["title"] for space in data if "title" in space]
return data, included, ids, titles
raise Exception("No spaces found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
data, included, ids, titles = self.get_spaces(
credentials,
input_data.identifier,
input_data.expansions,
input_data.space_fields,
input_data.user_fields,
)
if ids:
yield "ids", ids
if titles:
yield "titles", titles
if data:
yield "data", data
if included:
yield "includes", included
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetSpaceByIdBlock(Block):
"""
Gets information about a single Twitter Space specified by Space ID
"""
class Input(SpaceExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["spaces.read", "users.read", "offline.access"]
)
space_id: str = SchemaField(
description="Space ID to lookup",
placeholder="Enter Space ID",
required=True,
)
class Output(BlockSchema):
# Common outputs
id: str = SchemaField(description="Space ID")
title: str = SchemaField(description="Space title")
host_ids: list[str] = SchemaField(description="Host ID")
# Complete outputs for advanced use
data: dict = SchemaField(description="Complete space data")
includes: dict = SchemaField(
description="Additional data requested via expansions"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="c79700de-a62f-11ef-ab20-fb32bf9d5a9d",
description="This block retrieves information about a single Twitter Space.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetSpaceByIdBlock.Input,
output_schema=TwitterGetSpaceByIdBlock.Output,
test_input={
"space_id": "1DXxyRYNejbKM",
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"space_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("id", "1DXxyRYNejbKM"),
("title", "Test Space"),
("host_ids", ["1234567"]),
(
"data",
{
"id": "1DXxyRYNejbKM",
"title": "Test Space",
"host_ids": ["1234567"],
},
),
],
test_mock={
"get_space": lambda *args, **kwargs: (
{
"id": "1DXxyRYNejbKM",
"title": "Test Space",
"host_ids": ["1234567"],
},
{},
)
},
)
@staticmethod
def get_space(
credentials: TwitterCredentials,
space_id: str,
expansions: SpaceExpansionsFilter | None,
space_fields: SpaceFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": space_id,
}
params = (
SpaceExpansionsBuilder(params)
.add_expansions(expansions)
.add_space_fields(space_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_space(**params))
includes = {}
if response.includes:
for key, value in response.includes.items():
if isinstance(value, list):
includes[key] = [
item.data if hasattr(item, "data") else item
for item in value
]
else:
includes[key] = value.data if hasattr(value, "data") else value
data = {}
if response.data:
for key, value in response.data.items():
if isinstance(value, list):
data[key] = [
item.data if hasattr(item, "data") else item
for item in value
]
else:
data[key] = value.data if hasattr(value, "data") else value
return data, includes
raise Exception("Space not found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
space_data, includes = self.get_space(
credentials,
input_data.space_id,
input_data.expansions,
input_data.space_fields,
input_data.user_fields,
)
# Common outputs
if space_data:
if "id" in space_data:
yield "id", space_data.get("id")
if "title" in space_data:
yield "title", space_data.get("title")
if "host_ids" in space_data:
yield "host_ids", space_data.get("host_ids")
if space_data:
yield "data", space_data
if includes:
yield "includes", includes
except Exception as e:
yield "error", handle_tweepy_exception(e)
# Not tested yet; may have issues
class TwitterGetSpaceBuyersBlock(Block):
"""
Gets list of users who purchased a ticket to the requested Space
"""
class Input(UserExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["spaces.read", "users.read", "offline.access"]
)
space_id: str = SchemaField(
description="Space ID to lookup buyers for",
placeholder="Enter Space ID",
required=True,
)
class Output(BlockSchema):
# Common outputs
buyer_ids: list[str] = SchemaField(description="List of buyer IDs")
usernames: list[str] = SchemaField(description="List of buyer usernames")
# Complete outputs for advanced use
data: list[dict] = SchemaField(description="Complete space buyers data")
includes: dict = SchemaField(
description="Additional data requested via expansions"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="c1c121a8-a62f-11ef-8b0e-d7b85f96a46f",
description="This block retrieves a list of users who purchased tickets to a Twitter Space.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetSpaceBuyersBlock.Input,
output_schema=TwitterGetSpaceBuyersBlock.Output,
test_input={
"space_id": "1DXxyRYNejbKM",
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("buyer_ids", ["2244994945"]),
("usernames", ["testuser"]),
(
"data",
[{"id": "2244994945", "username": "testuser", "name": "Test User"}],
),
],
test_mock={
"get_space_buyers": lambda *args, **kwargs: (
[{"id": "2244994945", "username": "testuser", "name": "Test User"}],
{},
["2244994945"],
["testuser"],
)
},
)
@staticmethod
def get_space_buyers(
credentials: TwitterCredentials,
space_id: str,
expansions: UserExpansionsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": space_id,
}
params = (
UserExpansionsBuilder(params)
.add_expansions(expansions)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_space_buyers(**params))
included = IncludesSerializer.serialize(response.includes)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
buyer_ids = [buyer["id"] for buyer in data]
usernames = [buyer["username"] for buyer in data]
return data, included, buyer_ids, usernames
raise Exception("No buyers found for this Space")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
buyers_data, included, buyer_ids, usernames = self.get_space_buyers(
credentials,
input_data.space_id,
input_data.expansions,
input_data.user_fields,
)
if buyer_ids:
yield "buyer_ids", buyer_ids
if usernames:
yield "usernames", usernames
if buyers_data:
yield "data", buyers_data
if included:
yield "includes", included
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetSpaceTweetsBlock(Block):
"""
Gets list of Tweets shared in the requested Space
"""
class Input(TweetExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["spaces.read", "users.read", "offline.access"]
)
space_id: str = SchemaField(
description="Space ID to lookup tweets for",
placeholder="Enter Space ID",
required=True,
)
class Output(BlockSchema):
# Common outputs
tweet_ids: list[str] = SchemaField(description="List of tweet IDs")
texts: list[str] = SchemaField(description="List of tweet texts")
# Complete outputs for advanced use
data: list[dict] = SchemaField(description="Complete space tweets data")
includes: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(description="Response metadata")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="b69731e6-a62f-11ef-b2d4-1bf14dd6aee4",
description="This block retrieves tweets shared in a Twitter Space.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetSpaceTweetsBlock.Input,
output_schema=TwitterGetSpaceTweetsBlock.Output,
test_input={
"space_id": "1DXxyRYNejbKM",
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"media_fields": None,
"place_fields": None,
"poll_fields": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("tweet_ids", ["1234567890"]),
("texts", ["Test tweet"]),
("data", [{"id": "1234567890", "text": "Test tweet"}]),
],
test_mock={
"get_space_tweets": lambda *args, **kwargs: (
[{"id": "1234567890", "text": "Test tweet"}], # data
{},
["1234567890"],
["Test tweet"],
{},
)
},
)
@staticmethod
def get_space_tweets(
credentials: TwitterCredentials,
space_id: str,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
place_fields: TweetPlaceFieldsFilter | None,
poll_fields: TweetPollFieldsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": space_id,
}
params = (
TweetExpansionsBuilder(params)
.add_expansions(expansions)
.add_media_fields(media_fields)
.add_place_fields(place_fields)
.add_poll_fields(poll_fields)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_space_tweets(**params))
included = IncludesSerializer.serialize(response.includes)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
tweet_ids = [str(tweet["id"]) for tweet in data]
texts = [tweet["text"] for tweet in data]
meta = response.meta or {}
return data, included, tweet_ids, texts, meta
raise Exception("No tweets found for this Space")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
tweets_data, included, tweet_ids, texts, meta = self.get_space_tweets(
credentials,
input_data.space_id,
input_data.expansions,
input_data.media_fields,
input_data.place_fields,
input_data.poll_fields,
input_data.tweet_fields,
input_data.user_fields,
)
if tweet_ids:
yield "tweet_ids", tweet_ids
if texts:
yield "texts", texts
if tweets_data:
yield "data", tweets_data
if included:
yield "includes", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,20 @@
import tweepy
def handle_tweepy_exception(e: Exception) -> str:
if isinstance(e, tweepy.BadRequest):
return f"Bad Request (400): {str(e)}"
elif isinstance(e, tweepy.Unauthorized):
return f"Unauthorized (401): {str(e)}"
elif isinstance(e, tweepy.Forbidden):
return f"Forbidden (403): {str(e)}"
elif isinstance(e, tweepy.NotFound):
return f"Not Found (404): {str(e)}"
elif isinstance(e, tweepy.TooManyRequests):
return f"Too Many Requests (429): {str(e)}"
elif isinstance(e, tweepy.TwitterServerError):
return f"Twitter Server Error (5xx): {str(e)}"
elif isinstance(e, tweepy.TweepyException):
return f"Tweepy Error: {str(e)}"
else:
return f"Unexpected error: {str(e)}"

View File

@@ -0,0 +1,372 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import TweetExpansionsBuilder
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
ExpansionFilter,
TweetExpansionInputs,
TweetFieldsFilter,
TweetMediaFieldsFilter,
TweetPlaceFieldsFilter,
TweetPollFieldsFilter,
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterBookmarkTweetBlock(Block):
"""
Bookmark a tweet on Twitter
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "bookmark.write", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet to bookmark",
placeholder="Enter tweet ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the bookmark was successful")
error: str = SchemaField(description="Error message if the bookmark failed")
def __init__(self):
super().__init__(
id="f33d67be-a62f-11ef-a797-ff83ec29ee8e",
description="This block bookmarks a tweet on Twitter.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterBookmarkTweetBlock.Input,
output_schema=TwitterBookmarkTweetBlock.Output,
test_input={
"tweet_id": "1234567890",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"bookmark_tweet": lambda *args, **kwargs: True},
)
@staticmethod
def bookmark_tweet(
credentials: TwitterCredentials,
tweet_id: str,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.bookmark(tweet_id)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.bookmark_tweet(credentials, input_data.tweet_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetBookmarkedTweetsBlock(Block):
"""
    Gets all bookmarked tweets of the authenticated user from Twitter
"""
class Input(TweetExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "bookmark.read", "users.read", "offline.access"]
)
max_results: int | None = SchemaField(
description="Maximum number of results to return (1-100)",
placeholder="Enter max results",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for pagination",
placeholder="Enter pagination token",
default="",
advanced=True,
)
class Output(BlockSchema):
# Common Outputs that user commonly uses
id: list[str] = SchemaField(description="All Tweet IDs")
text: list[str] = SchemaField(description="All Tweet texts")
userId: list[str] = SchemaField(description="IDs of the tweet authors")
userName: list[str] = SchemaField(description="Usernames of the tweet authors")
# Complete Outputs for advanced use
data: list[dict] = SchemaField(description="Complete Tweet data")
included: dict = SchemaField(
description="Additional data that you have requested (Optional) via Expansions field"
)
meta: dict = SchemaField(
description="Provides metadata such as pagination info (next_token) or result counts"
)
next_token: str = SchemaField(description="Next token for pagination")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="ed26783e-a62f-11ef-9a21-c77c57dd8a1f",
description="This block retrieves bookmarked tweets from Twitter.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetBookmarkedTweetsBlock.Input,
output_schema=TwitterGetBookmarkedTweetsBlock.Output,
test_input={
"max_results": 2,
"pagination_token": None,
"expansions": None,
"media_fields": None,
"place_fields": None,
"poll_fields": None,
"tweet_fields": None,
"user_fields": None,
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("id", ["1234567890"]),
("text", ["Test tweet"]),
("userId", ["12345"]),
("userName", ["testuser"]),
("data", [{"id": "1234567890", "text": "Test tweet"}]),
],
test_mock={
"get_bookmarked_tweets": lambda *args, **kwargs: (
["1234567890"],
["Test tweet"],
["12345"],
["testuser"],
[{"id": "1234567890", "text": "Test tweet"}],
{},
{},
None,
)
},
)
@staticmethod
def get_bookmarked_tweets(
credentials: TwitterCredentials,
max_results: int | None,
pagination_token: str | None,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
place_fields: TweetPlaceFieldsFilter | None,
poll_fields: TweetPollFieldsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
}
params = (
TweetExpansionsBuilder(params)
.add_expansions(expansions)
.add_media_fields(media_fields)
.add_place_fields(place_fields)
.add_poll_fields(poll_fields)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(
Response,
client.get_bookmarks(**params),
)
meta = {}
tweet_ids = []
tweet_texts = []
user_ids = []
user_names = []
next_token = None
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
tweet_ids = [str(tweet.id) for tweet in response.data]
tweet_texts = [tweet.text for tweet in response.data]
if "users" in included:
for user in included["users"]:
user_ids.append(str(user["id"]))
user_names.append(user["username"])
return (
tweet_ids,
tweet_texts,
user_ids,
user_names,
data,
included,
meta,
next_token,
)
raise Exception("No bookmarked tweets found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, texts, user_ids, user_names, data, included, meta, next_token = (
self.get_bookmarked_tweets(
credentials,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.media_fields,
input_data.place_fields,
input_data.poll_fields,
input_data.tweet_fields,
input_data.user_fields,
)
)
if ids:
yield "id", ids
if texts:
yield "text", texts
if user_ids:
yield "userId", user_ids
if user_names:
yield "userName", user_names
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
if next_token:
yield "next_token", next_token
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterRemoveBookmarkTweetBlock(Block):
"""
Remove a bookmark for a tweet on Twitter
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "bookmark.write", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet to remove bookmark from",
placeholder="Enter tweet ID",
)
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the bookmark was successfully removed"
)
error: str = SchemaField(
description="Error message if the bookmark removal failed"
)
def __init__(self):
super().__init__(
id="e4100684-a62f-11ef-9be9-770cb41a2616",
description="This block removes a bookmark from a tweet on Twitter.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterRemoveBookmarkTweetBlock.Input,
output_schema=TwitterRemoveBookmarkTweetBlock.Output,
test_input={
"tweet_id": "1234567890",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"remove_bookmark_tweet": lambda *args, **kwargs: True},
)
@staticmethod
def remove_bookmark_tweet(
credentials: TwitterCredentials,
tweet_id: str,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.remove_bookmark(tweet_id)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.remove_bookmark_tweet(credentials, input_data.tweet_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,154 @@
import tweepy
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterHideReplyBlock(Block):
"""
Hides a reply to one of your tweets
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "tweet.moderate.write", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet reply to hide",
placeholder="Enter tweet ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the operation was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="07d58b3e-a630-11ef-a030-93701d1a465e",
description="This block hides a reply to a tweet.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterHideReplyBlock.Input,
output_schema=TwitterHideReplyBlock.Output,
test_input={
"tweet_id": "1234567890",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"hide_reply": lambda *args, **kwargs: True},
)
@staticmethod
def hide_reply(
credentials: TwitterCredentials,
tweet_id: str,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.hide_reply(id=tweet_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.hide_reply(
credentials,
input_data.tweet_id,
)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterUnhideReplyBlock(Block):
"""
Unhides a reply to a tweet
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "tweet.moderate.write", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet reply to unhide",
placeholder="Enter tweet ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the operation was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="fcf9e4e4-a62f-11ef-9d85-57d3d06b616a",
description="This block unhides a reply to a tweet.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterUnhideReplyBlock.Input,
output_schema=TwitterUnhideReplyBlock.Output,
test_input={
"tweet_id": "1234567890",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"unhide_reply": lambda *args, **kwargs: True},
)
@staticmethod
def unhide_reply(
credentials: TwitterCredentials,
tweet_id: str,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.unhide_reply(id=tweet_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.unhide_reply(
credentials,
input_data.tweet_id,
)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,576 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import (
TweetExpansionsBuilder,
UserExpansionsBuilder,
)
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
ExpansionFilter,
TweetExpansionInputs,
TweetFieldsFilter,
TweetMediaFieldsFilter,
TweetPlaceFieldsFilter,
TweetPollFieldsFilter,
TweetUserFieldsFilter,
UserExpansionInputs,
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterLikeTweetBlock(Block):
"""
Likes a tweet
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "like.write", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet to like",
placeholder="Enter tweet ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the operation was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="4d0b4c5c-a630-11ef-8e08-1b14c507b347",
description="This block likes a tweet.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterLikeTweetBlock.Input,
output_schema=TwitterLikeTweetBlock.Output,
test_input={
"tweet_id": "1234567890",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"like_tweet": lambda *args, **kwargs: True},
)
@staticmethod
def like_tweet(
credentials: TwitterCredentials,
tweet_id: str,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.like(tweet_id=tweet_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.like_tweet(
credentials,
input_data.tweet_id,
)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetLikingUsersBlock(Block):
"""
Gets information about users who liked one of your tweets
"""
class Input(UserExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "like.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet to get liking users for",
placeholder="Enter tweet ID",
)
max_results: int | None = SchemaField(
description="Maximum number of results to return (1-100)",
placeholder="Enter max results",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for getting next/previous page of results",
placeholder="Enter pagination token",
default="",
advanced=True,
)
class Output(BlockSchema):
# Commonly used outputs
id: list[str] = SchemaField(description="All User IDs who liked the tweet")
username: list[str] = SchemaField(
description="All User usernames who liked the tweet"
)
next_token: str = SchemaField(description="Next token for pagination")
# Complete Outputs for advanced use
data: list[dict] = SchemaField(description="Complete Tweet data")
included: dict = SchemaField(
description="Additional data that you have requested (Optional) via Expansions field"
)
meta: dict = SchemaField(
description="Provides metadata such as pagination info (next_token) or result counts"
)
# error
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="34275000-a630-11ef-b01e-5f00d9077c08",
description="This block gets information about users who liked a tweet.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetLikingUsersBlock.Input,
output_schema=TwitterGetLikingUsersBlock.Output,
test_input={
"tweet_id": "1234567890",
"max_results": 1,
"pagination_token": None,
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("id", ["1234567890"]),
("username", ["testuser"]),
("data", [{"id": "1234567890", "username": "testuser"}]),
],
test_mock={
"get_liking_users": lambda *args, **kwargs: (
["1234567890"],
["testuser"],
[{"id": "1234567890", "username": "testuser"}],
{},
{},
None,
)
},
)
@staticmethod
def get_liking_users(
credentials: TwitterCredentials,
tweet_id: str,
max_results: int | None,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": tweet_id,
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"user_auth": False,
}
params = (
UserExpansionsBuilder(params)
.add_expansions(expansions)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_liking_users(**params))
if not response.data and not response.meta:
raise Exception("No liking users found")
meta = {}
user_ids = []
usernames = []
next_token = None
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
user_ids = [str(user.id) for user in response.data]
usernames = [user.username for user in response.data]
return user_ids, usernames, data, included, meta, next_token
raise Exception("No liking users found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, usernames, data, included, meta, next_token = self.get_liking_users(
credentials,
input_data.tweet_id,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.tweet_fields,
input_data.user_fields,
)
if ids:
yield "id", ids
if usernames:
yield "username", usernames
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetLikedTweetsBlock(Block):
"""
Gets information about tweets liked by a user
"""
class Input(TweetExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "like.read", "offline.access"]
)
user_id: str = SchemaField(
description="ID of the user to get liked tweets for",
placeholder="Enter user ID",
)
max_results: int | None = SchemaField(
description="Maximum number of results to return (5-100)",
placeholder="100",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for getting next/previous page of results",
placeholder="Enter pagination token",
default="",
advanced=True,
)
class Output(BlockSchema):
# Commonly used outputs
ids: list[str] = SchemaField(description="All Tweet IDs")
texts: list[str] = SchemaField(description="All Tweet texts")
userIds: list[str] = SchemaField(
description="List of user ids that authored the tweets"
)
userNames: list[str] = SchemaField(
description="List of user names that authored the tweets"
)
next_token: str = SchemaField(description="Next token for pagination")
# Complete Outputs for advanced use
data: list[dict] = SchemaField(description="Complete Tweet data")
included: dict = SchemaField(
description="Additional data that you have requested (Optional) via Expansions field"
)
meta: dict = SchemaField(
description="Provides metadata such as pagination info (next_token) or result counts"
)
# error
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="292e7c78-a630-11ef-9f40-df5dffaca106",
description="This block gets information about tweets liked by a user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetLikedTweetsBlock.Input,
output_schema=TwitterGetLikedTweetsBlock.Output,
test_input={
"user_id": "1234567890",
"max_results": 2,
"pagination_token": None,
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"media_fields": None,
"place_fields": None,
"poll_fields": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["12345", "67890"]),
("texts", ["Tweet 1", "Tweet 2"]),
("userIds", ["67890", "67891"]),
("userNames", ["testuser1", "testuser2"]),
(
"data",
[
{"id": "12345", "text": "Tweet 1"},
{"id": "67890", "text": "Tweet 2"},
],
),
],
test_mock={
"get_liked_tweets": lambda *args, **kwargs: (
["12345", "67890"],
["Tweet 1", "Tweet 2"],
["67890", "67891"],
["testuser1", "testuser2"],
[
{"id": "12345", "text": "Tweet 1"},
{"id": "67890", "text": "Tweet 2"},
],
{},
{},
None,
)
},
)
@staticmethod
def get_liked_tweets(
credentials: TwitterCredentials,
user_id: str,
max_results: int | None,
pagination_token: str | None,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
place_fields: TweetPlaceFieldsFilter | None,
poll_fields: TweetPollFieldsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": user_id,
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"user_auth": False,
}
params = (
TweetExpansionsBuilder(params)
.add_expansions(expansions)
.add_media_fields(media_fields)
.add_place_fields(place_fields)
.add_poll_fields(poll_fields)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_liked_tweets(**params))
if not response.data and not response.meta:
raise Exception("No liked tweets found")
meta = {}
tweet_ids = []
tweet_texts = []
user_ids = []
user_names = []
next_token = None
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
tweet_ids = [str(tweet.id) for tweet in response.data]
tweet_texts = [tweet.text for tweet in response.data]
if "users" in response.includes:
user_ids = [str(user["id"]) for user in response.includes["users"]]
user_names = [
user["username"] for user in response.includes["users"]
]
return (
tweet_ids,
tweet_texts,
user_ids,
user_names,
data,
included,
meta,
next_token,
)
raise Exception("No liked tweets found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, texts, user_ids, user_names, data, included, meta, next_token = (
self.get_liked_tweets(
credentials,
input_data.user_id,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.media_fields,
input_data.place_fields,
input_data.poll_fields,
input_data.tweet_fields,
input_data.user_fields,
)
)
if ids:
yield "ids", ids
if texts:
yield "texts", texts
if user_ids:
yield "userIds", user_ids
if user_names:
yield "userNames", user_names
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterUnlikeTweetBlock(Block):
"""
Unlikes a tweet that was previously liked
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "like.write", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet to unlike",
placeholder="Enter tweet ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the operation was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="1ed5eab8-a630-11ef-8e21-cbbbc80cbb85",
description="This block unlikes a tweet.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterUnlikeTweetBlock.Input,
output_schema=TwitterUnlikeTweetBlock.Output,
test_input={
"tweet_id": "1234567890",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"unlike_tweet": lambda *args, **kwargs: True},
)
@staticmethod
def unlike_tweet(
credentials: TwitterCredentials,
tweet_id: str,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.unlike(tweet_id=tweet_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.unlike_tweet(
credentials,
input_data.tweet_id,
)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,545 @@
from datetime import datetime
from typing import List, Literal, Optional, Union, cast
import tweepy
from pydantic import BaseModel
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import (
TweetDurationBuilder,
TweetExpansionsBuilder,
TweetPostBuilder,
TweetSearchBuilder,
)
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
ExpansionFilter,
TweetExpansionInputs,
TweetFieldsFilter,
TweetMediaFieldsFilter,
TweetPlaceFieldsFilter,
TweetPollFieldsFilter,
TweetReplySettingsFilter,
TweetTimeWindowInputs,
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class Media(BaseModel):
discriminator: Literal["media"]
media_ids: Optional[List[str]] = None
media_tagged_user_ids: Optional[List[str]] = None
class DeepLink(BaseModel):
discriminator: Literal["deep_link"]
direct_message_deep_link: Optional[str] = None
class Poll(BaseModel):
discriminator: Literal["poll"]
poll_options: Optional[List[str]] = None
poll_duration_minutes: Optional[int] = None
class Place(BaseModel):
discriminator: Literal["place"]
place_id: Optional[str] = None
class Quote(BaseModel):
discriminator: Literal["quote"]
quote_tweet_id: Optional[str] = None
class TwitterPostTweetBlock(Block):
"""
Creates a tweet on Twitter, with the option to include one additional element such as media, a poll, a place, a quote, or a deep link.
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "tweet.write", "users.read", "offline.access"]
)
tweet_text: str | None = SchemaField(
description="Text of the tweet to post",
placeholder="Enter your tweet",
default=None,
advanced=False,
)
for_super_followers_only: bool = SchemaField(
description="Tweet exclusively for Super Followers",
placeholder="Enter for super followers only",
advanced=True,
default=False,
)
attachment: Union[Media, DeepLink, Poll, Place, Quote] | None = SchemaField(
discriminator="discriminator",
description="Additional tweet data (media, deep link, poll, place or quote)",
advanced=True,
)
exclude_reply_user_ids: Optional[List[str]] = SchemaField(
description="User IDs to exclude from reply Tweet thread. [ex - 6253282]",
placeholder="Enter user IDs to exclude",
advanced=True,
default=None,
)
in_reply_to_tweet_id: Optional[str] = SchemaField(
description="Tweet ID being replied to. Please note that in_reply_to_tweet_id needs to be in the request if exclude_reply_user_ids is present",
default=None,
placeholder="Enter in reply to tweet ID",
advanced=True,
)
reply_settings: TweetReplySettingsFilter = SchemaField(
description="Who can reply to the Tweet (mentionedUsers or following)",
placeholder="Enter reply settings",
advanced=True,
default=TweetReplySettingsFilter(All_Users=True),
)
class Output(BlockSchema):
tweet_id: str = SchemaField(description="ID of the created tweet")
tweet_url: str = SchemaField(description="URL to the tweet")
error: str = SchemaField(
description="Error message if the tweet posting failed"
)
def __init__(self):
super().__init__(
id="7bb0048a-a630-11ef-aeb8-abc0dadb9b12",
description="This block posts a tweet on Twitter.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterPostTweetBlock.Input,
output_schema=TwitterPostTweetBlock.Output,
test_input={
"tweet_text": "This is a test tweet.",
"credentials": TEST_CREDENTIALS_INPUT,
"attachment": {
"discriminator": "deep_link",
"direct_message_deep_link": "https://twitter.com/messages/compose",
},
"for_super_followers_only": False,
"exclude_reply_user_ids": [],
"in_reply_to_tweet_id": "",
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("tweet_id", "1234567890"),
("tweet_url", "https://twitter.com/user/status/1234567890"),
],
test_mock={
"post_tweet": lambda *args, **kwargs: (
"1234567890",
"https://twitter.com/user/status/1234567890",
)
},
)
def post_tweet(
self,
credentials: TwitterCredentials,
input_txt: str | None,
attachment: Union[Media, DeepLink, Poll, Place, Quote] | None,
for_super_followers_only: bool,
exclude_reply_user_ids: Optional[List[str]],
in_reply_to_tweet_id: Optional[str],
reply_settings: TweetReplySettingsFilter,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = (
TweetPostBuilder()
.add_text(input_txt)
.add_super_followers(for_super_followers_only)
.add_reply_settings(
exclude_reply_user_ids or [],
in_reply_to_tweet_id or "",
reply_settings,
)
)
if isinstance(attachment, Media):
params.add_media(
attachment.media_ids or [], attachment.media_tagged_user_ids or []
)
elif isinstance(attachment, DeepLink):
params.add_deep_link(attachment.direct_message_deep_link or "")
elif isinstance(attachment, Poll):
params.add_poll_options(attachment.poll_options or [])
params.add_poll_duration(attachment.poll_duration_minutes or 0)
elif isinstance(attachment, Place):
params.add_place(attachment.place_id or "")
elif isinstance(attachment, Quote):
params.add_quote(attachment.quote_tweet_id or "")
tweet = cast(Response, client.create_tweet(**params.build()))
if not tweet.data:
raise Exception("Failed to create tweet")
tweet_id = tweet.data["id"]
tweet_url = f"https://twitter.com/user/status/{tweet_id}"
return str(tweet_id), tweet_url
except tweepy.TweepyException:
raise
except Exception:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
tweet_id, tweet_url = self.post_tweet(
credentials,
input_data.tweet_text,
input_data.attachment,
input_data.for_super_followers_only,
input_data.exclude_reply_user_ids,
input_data.in_reply_to_tweet_id,
input_data.reply_settings,
)
yield "tweet_id", tweet_id
yield "tweet_url", tweet_url
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterDeleteTweetBlock(Block):
"""
Deletes a tweet on Twitter using its tweet ID
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "tweet.write", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet to delete",
placeholder="Enter tweet ID",
)
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the tweet was successfully deleted"
)
error: str = SchemaField(
description="Error message if the tweet deletion failed"
)
def __init__(self):
super().__init__(
id="761babf0-a630-11ef-a03d-abceb082f58f",
description="This block deletes a tweet on Twitter.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterDeleteTweetBlock.Input,
output_schema=TwitterDeleteTweetBlock.Output,
test_input={
"tweet_id": "1234567890",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"delete_tweet": lambda *args, **kwargs: True},
)
@staticmethod
def delete_tweet(credentials: TwitterCredentials, tweet_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.delete_tweet(id=tweet_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
except Exception:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.delete_tweet(
credentials,
input_data.tweet_id,
)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterSearchRecentTweetsBlock(Block):
"""
Searches recent public Tweets on Twitter (the recent search endpoint covers roughly the last seven days)
"""
class Input(TweetExpansionInputs, TweetTimeWindowInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "offline.access"]
)
query: str = SchemaField(
description="Search query (up to 1024 characters)",
placeholder="Enter search query",
)
max_results: int = SchemaField(
description="Maximum number of results per page (10-500)",
placeholder="Enter max results",
default=10,
advanced=True,
)
pagination: str | None = SchemaField(
description="Token for pagination",
default="",
placeholder="Enter pagination token",
advanced=True,
)
class Output(BlockSchema):
# Commonly used outputs
tweet_ids: list[str] = SchemaField(description="All Tweet IDs")
tweet_texts: list[str] = SchemaField(description="All Tweet texts")
next_token: str = SchemaField(description="Next token for pagination")
# Complete Outputs for advanced use
data: list[dict] = SchemaField(description="Complete Tweet data")
included: dict = SchemaField(
description="Additional data that you have requested (Optional) via Expansions field"
)
meta: dict = SchemaField(
description="Provides metadata such as pagination info (next_token) or result counts"
)
# error
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="53e5cf8e-a630-11ef-ba85-df6d666fa5d5",
description="This block searches all public Tweets in Twitter history.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterSearchRecentTweetsBlock.Input,
output_schema=TwitterSearchRecentTweetsBlock.Output,
test_input={
"query": "from:twitterapi #twitterapi",
"credentials": TEST_CREDENTIALS_INPUT,
"max_results": 2,
"start_time": "2024-12-14T18:30:00.000Z",
"end_time": "2024-12-17T18:30:00.000Z",
"since_id": None,
"until_id": None,
"sort_order": None,
"pagination": None,
"expansions": None,
"media_fields": None,
"place_fields": None,
"poll_fields": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("tweet_ids", ["1373001119480344583", "1372627771717869568"]),
(
"tweet_texts",
[
"Looking to get started with the Twitter API but new to APIs in general?",
"Thanks to everyone who joined and made today a great session!",
],
),
(
"data",
[
{
"id": "1373001119480344583",
"text": "Looking to get started with the Twitter API but new to APIs in general?",
},
{
"id": "1372627771717869568",
"text": "Thanks to everyone who joined and made today a great session!",
},
],
),
],
test_mock={
"search_tweets": lambda *args, **kwargs: (
["1373001119480344583", "1372627771717869568"],
[
"Looking to get started with the Twitter API but new to APIs in general?",
"Thanks to everyone who joined and made today a great session!",
],
[
{
"id": "1373001119480344583",
"text": "Looking to get started with the Twitter API but new to APIs in general?",
},
{
"id": "1372627771717869568",
"text": "Thanks to everyone who joined and made today a great session!",
},
],
{},
{},
None,
)
},
)
@staticmethod
def search_tweets(
credentials: TwitterCredentials,
query: str,
max_results: int,
start_time: datetime | None,
end_time: datetime | None,
since_id: str | None,
until_id: str | None,
sort_order: str | None,
pagination: str | None,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
place_fields: TweetPlaceFieldsFilter | None,
poll_fields: TweetPollFieldsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
# Building common params
params = (
TweetSearchBuilder()
.add_query(query)
.add_pagination(max_results, pagination)
.build()
)
# Adding expansions to params if requested by the user
params = (
TweetExpansionsBuilder(params)
.add_expansions(expansions)
.add_media_fields(media_fields)
.add_place_fields(place_fields)
.add_poll_fields(poll_fields)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
# Adding time window to params if requested by the user
params = (
TweetDurationBuilder(params)
.add_start_time(start_time)
.add_end_time(end_time)
.add_since_id(since_id)
.add_until_id(until_id)
.add_sort_order(sort_order)
.build()
)
response = cast(Response, client.search_recent_tweets(**params))
if not response.data and not response.meta:
raise Exception("No tweets found")
meta = {}
tweet_ids = []
tweet_texts = []
next_token = None
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
tweet_ids = [str(tweet.id) for tweet in response.data]
tweet_texts = [tweet.text for tweet in response.data]
return tweet_ids, tweet_texts, data, included, meta, next_token
raise Exception("No tweets found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, texts, data, included, meta, next_token = self.search_tweets(
credentials,
input_data.query,
input_data.max_results,
input_data.start_time,
input_data.end_time,
input_data.since_id,
input_data.until_id,
input_data.sort_order,
input_data.pagination,
input_data.expansions,
input_data.media_fields,
input_data.place_fields,
input_data.poll_fields,
input_data.tweet_fields,
input_data.user_fields,
)
if ids:
yield "tweet_ids", ids
if texts:
yield "tweet_texts", texts
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,222 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import TweetExpansionsBuilder
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
ExpansionFilter,
TweetExcludesFilter,
TweetExpansionInputs,
TweetFieldsFilter,
TweetMediaFieldsFilter,
TweetPlaceFieldsFilter,
TweetPollFieldsFilter,
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterGetQuoteTweetsBlock(Block):
"""
Gets quote tweets for a specified tweet ID
"""
class Input(TweetExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet to get quotes for",
placeholder="Enter tweet ID",
)
max_results: int | None = SchemaField(
description="Number of results to return (max 100)",
default=10,
advanced=True,
)
exclude: TweetExcludesFilter | None = SchemaField(
description="Types of tweets to exclude", advanced=True, default=None
)
pagination_token: str | None = SchemaField(
description="Token for pagination",
advanced=True,
default="",
)
class Output(BlockSchema):
# Commonly used outputs
ids: list = SchemaField(description="All Tweet IDs ")
texts: list = SchemaField(description="All Tweet texts")
next_token: str = SchemaField(description="Next token for pagination")
# Complete Outputs for advanced use
data: list[dict] = SchemaField(description="Complete Tweet data")
included: dict = SchemaField(
description="Additional data that you have requested (Optional) via Expansions field"
)
meta: dict = SchemaField(
description="Provides metadata such as pagination info (next_token) or result counts"
)
# error
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="9fbdd208-a630-11ef-9b97-ab7a3a695ca3",
description="This block gets quote tweets for a specific tweet.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetQuoteTweetsBlock.Input,
output_schema=TwitterGetQuoteTweetsBlock.Output,
test_input={
"tweet_id": "1234567890",
"max_results": 2,
"pagination_token": None,
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["12345", "67890"]),
("texts", ["Tweet 1", "Tweet 2"]),
(
"data",
[
{"id": "12345", "text": "Tweet 1"},
{"id": "67890", "text": "Tweet 2"},
],
),
],
test_mock={
"get_quote_tweets": lambda *args, **kwargs: (
["12345", "67890"],
["Tweet 1", "Tweet 2"],
[
{"id": "12345", "text": "Tweet 1"},
{"id": "67890", "text": "Tweet 2"},
],
{},
{},
None,
)
},
)
@staticmethod
def get_quote_tweets(
credentials: TwitterCredentials,
tweet_id: str,
max_results: int | None,
exclude: TweetExcludesFilter | None,
pagination_token: str | None,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
place_fields: TweetPlaceFieldsFilter | None,
poll_fields: TweetPollFieldsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": tweet_id,
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"exclude": None if exclude == TweetExcludesFilter() else exclude,
"user_auth": False,
}
params = (
TweetExpansionsBuilder(params)
.add_expansions(expansions)
.add_media_fields(media_fields)
.add_place_fields(place_fields)
.add_poll_fields(poll_fields)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_quote_tweets(**params))
meta = {}
tweet_ids = []
tweet_texts = []
next_token = None
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
tweet_ids = [str(tweet.id) for tweet in response.data]
tweet_texts = [tweet.text for tweet in response.data]
return tweet_ids, tweet_texts, data, included, meta, next_token
raise Exception("No quote tweets found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, texts, data, included, meta, next_token = self.get_quote_tweets(
credentials,
input_data.tweet_id,
input_data.max_results,
input_data.exclude,
input_data.pagination_token,
input_data.expansions,
input_data.media_fields,
input_data.place_fields,
input_data.poll_fields,
input_data.tweet_fields,
input_data.user_fields,
)
if ids:
yield "ids", ids
if texts:
yield "texts", texts
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,363 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import UserExpansionsBuilder
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
TweetFieldsFilter,
TweetUserFieldsFilter,
UserExpansionInputs,
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterRetweetBlock(Block):
"""
Retweets a tweet on Twitter
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "tweet.write", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet to retweet",
placeholder="Enter tweet ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the retweet was successful")
error: str = SchemaField(description="Error message if the retweet failed")
def __init__(self):
super().__init__(
id="bd7b8d3a-a630-11ef-be96-6f4aa4c3c4f4",
description="This block retweets a tweet on Twitter.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterRetweetBlock.Input,
output_schema=TwitterRetweetBlock.Output,
test_input={
"tweet_id": "1234567890",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"retweet": lambda *args, **kwargs: True},
)
@staticmethod
def retweet(
credentials: TwitterCredentials,
tweet_id: str,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.retweet(
tweet_id=tweet_id,
user_auth=False,
)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.retweet(
credentials,
input_data.tweet_id,
)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterRemoveRetweetBlock(Block):
"""
Removes a retweet on Twitter
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "tweet.write", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet to remove retweet",
placeholder="Enter tweet ID",
)
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the retweet was successfully removed"
)
error: str = SchemaField(description="Error message if the removal failed")
def __init__(self):
super().__init__(
id="b6e663f0-a630-11ef-a7f0-8b9b0c542ff8",
description="This block removes a retweet on Twitter.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterRemoveRetweetBlock.Input,
output_schema=TwitterRemoveRetweetBlock.Output,
test_input={
"tweet_id": "1234567890",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"remove_retweet": lambda *args, **kwargs: True},
)
@staticmethod
def remove_retweet(
credentials: TwitterCredentials,
tweet_id: str,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.unretweet(
source_tweet_id=tweet_id,
user_auth=False,
)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.remove_retweet(
credentials,
input_data.tweet_id,
)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetRetweetersBlock(Block):
"""
Gets information about who has retweeted a tweet
"""
class Input(UserExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="ID of the tweet to get retweeters for",
placeholder="Enter tweet ID",
)
max_results: int | None = SchemaField(
description="Maximum number of results per page (1-100)",
default=10,
placeholder="Enter max results",
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for pagination",
placeholder="Enter pagination token",
default="",
)
class Output(BlockSchema):
# Commonly used outputs
ids: list = SchemaField(description="List of user ids who retweeted")
names: list = SchemaField(description="List of user names who retweeted")
usernames: list = SchemaField(
description="List of user usernames who retweeted"
)
next_token: str = SchemaField(description="Token for next page of results")
# Complete Outputs for advanced use
data: list[dict] = SchemaField(description="Complete Tweet data")
included: dict = SchemaField(
description="Additional data that you have requested (Optional) via Expansions field"
)
meta: dict = SchemaField(
description="Provides metadata such as pagination info (next_token) or result counts"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="ad7aa6fa-a630-11ef-a6b0-e7ca640aa030",
description="This block gets information about who has retweeted a tweet.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetRetweetersBlock.Input,
output_schema=TwitterGetRetweetersBlock.Output,
test_input={
"tweet_id": "1234567890",
"credentials": TEST_CREDENTIALS_INPUT,
"max_results": 1,
"pagination_token": "",
"expansions": None,
"media_fields": None,
"place_fields": None,
"poll_fields": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["12345"]),
("names", ["Test User"]),
("usernames", ["testuser"]),
(
"data",
[{"id": "12345", "name": "Test User", "username": "testuser"}],
),
],
test_mock={
"get_retweeters": lambda *args, **kwargs: (
[{"id": "12345", "name": "Test User", "username": "testuser"}],
{},
{},
["12345"],
["Test User"],
["testuser"],
None,
)
},
)
@staticmethod
def get_retweeters(
credentials: TwitterCredentials,
tweet_id: str,
max_results: int | None,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": tweet_id,
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"user_auth": False,
}
params = (
UserExpansionsBuilder(params)
.add_expansions(expansions)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_retweeters(**params))
meta = {}
ids = []
names = []
usernames = []
next_token = None
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
ids = [str(user.id) for user in response.data]
names = [user.name for user in response.data]
usernames = [user.username for user in response.data]
return data, included, meta, ids, names, usernames, next_token
raise Exception("No retweeters found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
data, included, meta, ids, names, usernames, next_token = (
self.get_retweeters(
credentials,
input_data.tweet_id,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.tweet_fields,
input_data.user_fields,
)
)
if ids:
yield "ids", ids
if names:
yield "names", names
if usernames:
yield "usernames", usernames
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@@ -0,0 +1,757 @@
from datetime import datetime
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import (
TweetDurationBuilder,
TweetExpansionsBuilder,
)
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
ExpansionFilter,
TweetExpansionInputs,
TweetFieldsFilter,
TweetMediaFieldsFilter,
TweetPlaceFieldsFilter,
TweetPollFieldsFilter,
TweetTimeWindowInputs,
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterGetUserMentionsBlock(Block):
"""
Returns Tweets that mention a single user, specified by that user's ID
"""
class Input(TweetExpansionInputs, TweetTimeWindowInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "offline.access"]
)
user_id: str = SchemaField(
description="Unique identifier of the user for whom to return Tweets mentioning the user",
placeholder="Enter user ID",
)
max_results: int | None = SchemaField(
description="Number of tweets to retrieve (5-100)",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for pagination", default="", advanced=True
)
class Output(BlockSchema):
# Commonly used outputs
ids: list[str] = SchemaField(description="List of Tweet IDs")
texts: list[str] = SchemaField(description="All Tweet texts")
userIds: list[str] = SchemaField(
description="List of user ids that mentioned the user"
)
userNames: list[str] = SchemaField(
description="List of user names that mentioned the user"
)
next_token: str = SchemaField(description="Next token for pagination")
# Complete Outputs for advanced use
data: list[dict] = SchemaField(description="Complete Tweet data")
included: dict = SchemaField(
description="Additional data that you have requested (Optional) via Expansions field"
)
meta: dict = SchemaField(
description="Provides metadata such as pagination info (next_token) or result counts"
)
# error
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="e01c890c-a630-11ef-9e20-37da24888bd0",
description="This block retrieves Tweets mentioning a specific user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetUserMentionsBlock.Input,
output_schema=TwitterGetUserMentionsBlock.Output,
test_input={
"user_id": "12345",
"credentials": TEST_CREDENTIALS_INPUT,
"max_results": 2,
"start_time": "2024-12-14T18:30:00.000Z",
"end_time": "2024-12-17T18:30:00.000Z",
"since_id": "",
"until_id": "",
"sort_order": None,
"pagination_token": None,
"expansions": None,
"media_fields": None,
"place_fields": None,
"poll_fields": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["1373001119480344583", "1372627771717869568"]),
("texts", ["Test mention 1", "Test mention 2"]),
("userIds", ["67890", "67891"]),
("userNames", ["testuser1", "testuser2"]),
(
"data",
[
{"id": "1373001119480344583", "text": "Test mention 1"},
{"id": "1372627771717869568", "text": "Test mention 2"},
],
),
],
test_mock={
"get_mentions": lambda *args, **kwargs: (
["1373001119480344583", "1372627771717869568"],
["Test mention 1", "Test mention 2"],
["67890", "67891"],
["testuser1", "testuser2"],
[
{"id": "1373001119480344583", "text": "Test mention 1"},
{"id": "1372627771717869568", "text": "Test mention 2"},
],
{},
{},
None,
)
},
)
@staticmethod
def get_mentions(
credentials: TwitterCredentials,
user_id: str,
max_results: int | None,
start_time: datetime | None,
end_time: datetime | None,
since_id: str | None,
until_id: str | None,
sort_order: str | None,
pagination: str | None,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
place_fields: TweetPlaceFieldsFilter | None,
poll_fields: TweetPollFieldsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": user_id,
"max_results": max_results,
"pagination_token": None if pagination == "" else pagination,
"user_auth": False,
}
# Adding expansions to params if requested by the user
params = (
TweetExpansionsBuilder(params)
.add_expansions(expansions)
.add_media_fields(media_fields)
.add_place_fields(place_fields)
.add_poll_fields(poll_fields)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
# Adding time window to params if requested by the user
params = (
TweetDurationBuilder(params)
.add_start_time(start_time)
.add_end_time(end_time)
.add_since_id(since_id)
.add_until_id(until_id)
.add_sort_order(sort_order)
.build()
)
response = cast(
Response,
client.get_users_mentions(**params),
)
if not response.data and not response.meta:
raise Exception("No tweets found")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
meta = response.meta or {}
next_token = meta.get("next_token", "")
tweet_ids = []
tweet_texts = []
user_ids = []
user_names = []
if response.data:
tweet_ids = [str(tweet.id) for tweet in response.data]
tweet_texts = [tweet.text for tweet in response.data]
if "users" in included:
user_ids = [str(user["id"]) for user in included["users"]]
user_names = [user["username"] for user in included["users"]]
return (
tweet_ids,
tweet_texts,
user_ids,
user_names,
data,
included,
meta,
next_token,
)
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, texts, user_ids, user_names, data, included, meta, next_token = (
self.get_mentions(
credentials,
input_data.user_id,
input_data.max_results,
input_data.start_time,
input_data.end_time,
input_data.since_id,
input_data.until_id,
input_data.sort_order,
input_data.pagination_token,
input_data.expansions,
input_data.media_fields,
input_data.place_fields,
input_data.poll_fields,
input_data.tweet_fields,
input_data.user_fields,
)
)
if ids:
yield "ids", ids
if texts:
yield "texts", texts
if user_ids:
yield "userIds", user_ids
if user_names:
yield "userNames", user_names
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetHomeTimelineBlock(Block):
"""
Returns a collection of the most recent Tweets and Retweets posted by you and users you follow
"""
class Input(TweetExpansionInputs, TweetTimeWindowInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "offline.access"]
)
max_results: int | None = SchemaField(
description="Number of tweets to retrieve (5-100)",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for pagination", default="", advanced=True
)
class Output(BlockSchema):
# Commonly used outputs
ids: list[str] = SchemaField(description="List of Tweet IDs")
texts: list[str] = SchemaField(description="All Tweet texts")
userIds: list[str] = SchemaField(
description="List of user ids that authored the tweets"
)
userNames: list[str] = SchemaField(
description="List of user names that authored the tweets"
)
next_token: str = SchemaField(description="Next token for pagination")
# Complete Outputs for advanced use
data: list[dict] = SchemaField(description="Complete Tweet data")
included: dict = SchemaField(
description="Additional data that you have requested (Optional) via Expansions field"
)
meta: dict = SchemaField(
description="Provides metadata such as pagination info (next_token) or result counts"
)
# error
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="d222a070-a630-11ef-a18a-3f52f76c6962",
description="This block retrieves the authenticated user's home timeline.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetHomeTimelineBlock.Input,
output_schema=TwitterGetHomeTimelineBlock.Output,
test_input={
"credentials": TEST_CREDENTIALS_INPUT,
"max_results": 2,
"start_time": "2024-12-14T18:30:00.000Z",
"end_time": "2024-12-17T18:30:00.000Z",
"since_id": None,
"until_id": None,
"sort_order": None,
"pagination_token": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["1373001119480344583", "1372627771717869568"]),
("texts", ["Test tweet 1", "Test tweet 2"]),
("userIds", ["67890", "67891"]),
("userNames", ["testuser1", "testuser2"]),
(
"data",
[
{"id": "1373001119480344583", "text": "Test tweet 1"},
{"id": "1372627771717869568", "text": "Test tweet 2"},
],
),
],
test_mock={
"get_timeline": lambda *args, **kwargs: (
["1373001119480344583", "1372627771717869568"],
["Test tweet 1", "Test tweet 2"],
["67890", "67891"],
["testuser1", "testuser2"],
[
{"id": "1373001119480344583", "text": "Test tweet 1"},
{"id": "1372627771717869568", "text": "Test tweet 2"},
],
{},
{},
None,
)
},
)
@staticmethod
def get_timeline(
credentials: TwitterCredentials,
max_results: int | None,
start_time: datetime | None,
end_time: datetime | None,
since_id: str | None,
until_id: str | None,
sort_order: str | None,
pagination: str | None,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
place_fields: TweetPlaceFieldsFilter | None,
poll_fields: TweetPollFieldsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"max_results": max_results,
"pagination_token": None if pagination == "" else pagination,
"user_auth": False,
}
# Add expansions to params if required by the user
params = (
TweetExpansionsBuilder(params)
.add_expansions(expansions)
.add_media_fields(media_fields)
.add_place_fields(place_fields)
.add_poll_fields(poll_fields)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
# Add time window to params if required by the user
params = (
TweetDurationBuilder(params)
.add_start_time(start_time)
.add_end_time(end_time)
.add_since_id(since_id)
.add_until_id(until_id)
.add_sort_order(sort_order)
.build()
)
response = cast(
Response,
client.get_home_timeline(**params),
)
if not response.data and not response.meta:
raise Exception("No tweets found")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
meta = response.meta or {}
next_token = meta.get("next_token", "")
tweet_ids = []
tweet_texts = []
user_ids = []
user_names = []
if response.data:
tweet_ids = [str(tweet.id) for tweet in response.data]
tweet_texts = [tweet.text for tweet in response.data]
if "users" in included:
user_ids = [str(user["id"]) for user in included["users"]]
user_names = [user["username"] for user in included["users"]]
return (
tweet_ids,
tweet_texts,
user_ids,
user_names,
data,
included,
meta,
next_token,
)
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, texts, user_ids, user_names, data, included, meta, next_token = (
self.get_timeline(
credentials,
input_data.max_results,
input_data.start_time,
input_data.end_time,
input_data.since_id,
input_data.until_id,
input_data.sort_order,
input_data.pagination_token,
input_data.expansions,
input_data.media_fields,
input_data.place_fields,
input_data.poll_fields,
input_data.tweet_fields,
input_data.user_fields,
)
)
if ids:
yield "ids", ids
if texts:
yield "texts", texts
if user_ids:
yield "userIds", user_ids
if user_names:
yield "userNames", user_names
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetUserTweetsBlock(Block):
"""
Returns Tweets composed by a single user, specified by the requested user ID
"""
class Input(TweetExpansionInputs, TweetTimeWindowInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "offline.access"]
)
user_id: str = SchemaField(
description="Unique identifier of the Twitter account (user ID) for whom to return results",
placeholder="Enter user ID",
)
max_results: int | None = SchemaField(
description="Number of tweets to retrieve (5-100)",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for pagination", default="", advanced=True
)
class Output(BlockSchema):
# Commonly used outputs
ids: list[str] = SchemaField(description="List of Tweet IDs")
texts: list[str] = SchemaField(description="All Tweet texts")
userIds: list[str] = SchemaField(
description="List of user ids that authored the tweets"
)
userNames: list[str] = SchemaField(
description="List of user names that authored the tweets"
)
next_token: str = SchemaField(description="Next token for pagination")
# Complete Outputs for advanced use
data: list[dict] = SchemaField(description="Complete Tweet data")
included: dict = SchemaField(
description="Additional data that you have requested (Optional) via Expansions field"
)
meta: dict = SchemaField(
description="Provides metadata such as pagination info (next_token) or result counts"
)
# error
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="c44c3ef2-a630-11ef-9ff7-eb7b5ea3a5cb",
description="This block retrieves Tweets composed by a single user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetUserTweetsBlock.Input,
output_schema=TwitterGetUserTweetsBlock.Output,
test_input={
"user_id": "12345",
"credentials": TEST_CREDENTIALS_INPUT,
"max_results": 2,
"start_time": "2024-12-14T18:30:00.000Z",
"end_time": "2024-12-17T18:30:00.000Z",
"since_id": None,
"until_id": None,
"sort_order": None,
"pagination_token": None,
"expansions": None,
"media_fields": None,
"place_fields": None,
"poll_fields": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["1373001119480344583", "1372627771717869568"]),
("texts", ["Test tweet 1", "Test tweet 2"]),
("userIds", ["67890", "67891"]),
("userNames", ["testuser1", "testuser2"]),
(
"data",
[
{"id": "1373001119480344583", "text": "Test tweet 1"},
{"id": "1372627771717869568", "text": "Test tweet 2"},
],
),
],
test_mock={
"get_user_tweets": lambda *args, **kwargs: (
["1373001119480344583", "1372627771717869568"],
["Test tweet 1", "Test tweet 2"],
["67890", "67891"],
["testuser1", "testuser2"],
[
{"id": "1373001119480344583", "text": "Test tweet 1"},
{"id": "1372627771717869568", "text": "Test tweet 2"},
],
{},
{},
None,
)
},
)
@staticmethod
def get_user_tweets(
credentials: TwitterCredentials,
user_id: str,
max_results: int | None,
start_time: datetime | None,
end_time: datetime | None,
since_id: str | None,
until_id: str | None,
sort_order: str | None,
pagination: str | None,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
place_fields: TweetPlaceFieldsFilter | None,
poll_fields: TweetPollFieldsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": user_id,
"max_results": max_results,
"pagination_token": None if pagination == "" else pagination,
"user_auth": False,
}
# Add expansions to params if required by the user
params = (
TweetExpansionsBuilder(params)
.add_expansions(expansions)
.add_media_fields(media_fields)
.add_place_fields(place_fields)
.add_poll_fields(poll_fields)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
# Add time window to params if required by the user
params = (
TweetDurationBuilder(params)
.add_start_time(start_time)
.add_end_time(end_time)
.add_since_id(since_id)
.add_until_id(until_id)
.add_sort_order(sort_order)
.build()
)
response = cast(
Response,
client.get_users_tweets(**params),
)
if not response.data and not response.meta:
raise Exception("No tweets found")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
meta = response.meta or {}
next_token = meta.get("next_token", "")
tweet_ids = []
tweet_texts = []
user_ids = []
user_names = []
if response.data:
tweet_ids = [str(tweet.id) for tweet in response.data]
tweet_texts = [tweet.text for tweet in response.data]
if "users" in included:
user_ids = [str(user["id"]) for user in included["users"]]
user_names = [user["username"] for user in included["users"]]
return (
tweet_ids,
tweet_texts,
user_ids,
user_names,
data,
included,
meta,
next_token,
)
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, texts, user_ids, user_names, data, included, meta, next_token = (
self.get_user_tweets(
credentials,
input_data.user_id,
input_data.max_results,
input_data.start_time,
input_data.end_time,
input_data.since_id,
input_data.until_id,
input_data.sort_order,
input_data.pagination_token,
input_data.expansions,
input_data.media_fields,
input_data.place_fields,
input_data.poll_fields,
input_data.tweet_fields,
input_data.user_fields,
)
)
if ids:
yield "ids", ids
if texts:
yield "texts", texts
if user_ids:
yield "userIds", user_ids
if user_names:
yield "userNames", user_names
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@ -0,0 +1,361 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import TweetExpansionsBuilder
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
ExpansionFilter,
TweetExpansionInputs,
TweetFieldsFilter,
TweetMediaFieldsFilter,
TweetPlaceFieldsFilter,
TweetPollFieldsFilter,
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterGetTweetBlock(Block):
"""
Returns information about a single Tweet specified by the requested ID
"""
class Input(TweetExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "offline.access"]
)
tweet_id: str = SchemaField(
description="Unique identifier of the Tweet to request (ex: 1460323737035677698)",
placeholder="Enter tweet ID",
)
class Output(BlockSchema):
# Commonly used outputs
id: str = SchemaField(description="Tweet ID")
text: str = SchemaField(description="Tweet text")
userId: str = SchemaField(description="ID of the tweet author")
userName: str = SchemaField(description="Username of the tweet author")
# Complete Outputs for advanced use
data: dict = SchemaField(description="Tweet data")
included: dict = SchemaField(
description="Additional data that you have requested (Optional) via Expansions field"
)
meta: dict = SchemaField(description="Metadata about the tweet")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="f5155c3a-a630-11ef-9cc1-a309988b4d92",
description="This block retrieves information about a specific Tweet.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetTweetBlock.Input,
output_schema=TwitterGetTweetBlock.Output,
test_input={
"tweet_id": "1460323737035677698",
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"media_fields": None,
"place_fields": None,
"poll_fields": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("id", "1460323737035677698"),
("text", "Test tweet content"),
("userId", "12345"),
("userName", "testuser"),
("data", {"id": "1460323737035677698", "text": "Test tweet content"}),
("included", {"users": [{"id": "12345", "username": "testuser"}]}),
("meta", {"result_count": 1}),
],
test_mock={
"get_tweet": lambda *args, **kwargs: (
{"id": "1460323737035677698", "text": "Test tweet content"},
{"users": [{"id": "12345", "username": "testuser"}]},
{"result_count": 1},
"12345",
"testuser",
)
},
)
@staticmethod
def get_tweet(
credentials: TwitterCredentials,
tweet_id: str,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
place_fields: TweetPlaceFieldsFilter | None,
poll_fields: TweetPollFieldsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {"id": tweet_id, "user_auth": False}
# Add expansions to params if required by the user
params = (
TweetExpansionsBuilder(params)
.add_expansions(expansions)
.add_media_fields(media_fields)
.add_place_fields(place_fields)
.add_poll_fields(poll_fields)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_tweet(**params))
meta = {}
user_id = ""
user_name = ""
if response.meta:
meta = response.meta
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_dict(response.data)
if included and "users" in included:
user_id = str(included["users"][0]["id"])
user_name = included["users"][0]["username"]
if response.data:
return data, included, meta, user_id, user_name
raise Exception("Tweet not found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
tweet_data, included, meta, user_id, user_name = self.get_tweet(
credentials,
input_data.tweet_id,
input_data.expansions,
input_data.media_fields,
input_data.place_fields,
input_data.poll_fields,
input_data.tweet_fields,
input_data.user_fields,
)
yield "id", str(tweet_data["id"])
yield "text", tweet_data["text"]
if user_id:
yield "userId", user_id
if user_name:
yield "userName", user_name
yield "data", tweet_data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetTweetsBlock(Block):
"""
Returns information about multiple Tweets specified by the requested IDs
"""
class Input(TweetExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["tweet.read", "users.read", "offline.access"]
)
tweet_ids: list[str] = SchemaField(
description="List of Tweet IDs to request (up to 100)",
placeholder="Enter tweet IDs",
)
class Output(BlockSchema):
# Commonly used outputs
ids: list[str] = SchemaField(description="All Tweet IDs")
texts: list[str] = SchemaField(description="All Tweet texts")
userIds: list[str] = SchemaField(
description="List of user ids that authored the tweets"
)
userNames: list[str] = SchemaField(
description="List of user names that authored the tweets"
)
# Complete Outputs for advanced use
data: list[dict] = SchemaField(description="Complete Tweet data")
included: dict = SchemaField(
description="Additional data that you have requested (Optional) via Expansions field"
)
meta: dict = SchemaField(description="Metadata about the tweets")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="e7cc5420-a630-11ef-bfaf-13bdd8096a51",
description="This block retrieves information about multiple Tweets.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetTweetsBlock.Input,
output_schema=TwitterGetTweetsBlock.Output,
test_input={
"tweet_ids": ["1460323737035677698"],
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"media_fields": None,
"place_fields": None,
"poll_fields": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["1460323737035677698"]),
("texts", ["Test tweet content"]),
("userIds", ["67890"]),
("userNames", ["testuser1"]),
("data", [{"id": "1460323737035677698", "text": "Test tweet content"}]),
("included", {"users": [{"id": "67890", "username": "testuser1"}]}),
("meta", {"result_count": 1}),
],
test_mock={
"get_tweets": lambda *args, **kwargs: (
["1460323737035677698"], # ids
["Test tweet content"], # texts
["67890"], # user_ids
["testuser1"], # user_names
[
{"id": "1460323737035677698", "text": "Test tweet content"}
], # data
{"users": [{"id": "67890", "username": "testuser1"}]}, # included
{"result_count": 1}, # meta
)
},
)
@staticmethod
def get_tweets(
credentials: TwitterCredentials,
tweet_ids: list[str],
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
place_fields: TweetPlaceFieldsFilter | None,
poll_fields: TweetPollFieldsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {"ids": tweet_ids, "user_auth": False}
# Add expansions to params if required by the user
params = (
TweetExpansionsBuilder(params)
.add_expansions(expansions)
.add_media_fields(media_fields)
.add_place_fields(place_fields)
.add_poll_fields(poll_fields)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_tweets(**params))
if not response.data and not response.meta:
raise Exception("No tweets found")
tweet_ids = []
tweet_texts = []
user_ids = []
user_names = []
meta = {}
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
tweet_ids = [str(tweet.id) for tweet in response.data]
tweet_texts = [tweet.text for tweet in response.data]
if included and "users" in included:
for user in included["users"]:
user_ids.append(str(user["id"]))
user_names.append(user["username"])
if response.meta:
meta = response.meta
return tweet_ids, tweet_texts, user_ids, user_names, data, included, meta
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, texts, user_ids, user_names, data, included, meta = self.get_tweets(
credentials,
input_data.tweet_ids,
input_data.expansions,
input_data.media_fields,
input_data.place_fields,
input_data.poll_fields,
input_data.tweet_fields,
input_data.user_fields,
)
if ids:
yield "ids", ids
if texts:
yield "texts", texts
if user_ids:
yield "userIds", user_ids
if user_names:
yield "userNames", user_names
if data:
yield "data", data
if included:
yield "included", included
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@ -0,0 +1,305 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import UserExpansionsBuilder
from backend.blocks.twitter._serializer import IncludesSerializer
from backend.blocks.twitter._types import (
TweetFieldsFilter,
TweetUserFieldsFilter,
UserExpansionInputs,
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterUnblockUserBlock(Block):
"""
Unblock a specific user on Twitter. The request succeeds with no action when the user sends a request to a user they're not blocking or have already unblocked.
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["block.write", "users.read", "offline.access"]
)
target_user_id: str = SchemaField(
description="The user ID of the user that you would like to unblock",
placeholder="Enter target user ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the unblock was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="0f1b6570-a631-11ef-a3ea-230cbe9650dd",
description="This block unblocks a specific user on Twitter.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterUnblockUserBlock.Input,
output_schema=TwitterUnblockUserBlock.Output,
test_input={
"target_user_id": "12345",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"unblock_user": lambda *args, **kwargs: True},
)
@staticmethod
def unblock_user(credentials: TwitterCredentials, target_user_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.unblock(target_user_id=target_user_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.unblock_user(credentials, input_data.target_user_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetBlockedUsersBlock(Block):
"""
Get a list of users who are blocked by the authenticating user
"""
class Input(UserExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["users.read", "offline.access", "block.read"]
)
max_results: int | None = SchemaField(
description="Maximum number of results to return (1-1000, default 100)",
placeholder="Enter max results",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for retrieving next/previous page of results",
placeholder="Enter pagination token",
default="",
advanced=True,
)
class Output(BlockSchema):
user_ids: list[str] = SchemaField(description="List of blocked user IDs")
usernames_: list[str] = SchemaField(description="List of blocked usernames")
included: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(description="Metadata including pagination info")
next_token: str = SchemaField(description="Next token for pagination")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="05f409e8-a631-11ef-ae89-93de863ee30d",
description="This block retrieves a list of users blocked by the authenticating user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetBlockedUsersBlock.Input,
output_schema=TwitterGetBlockedUsersBlock.Output,
test_input={
"max_results": 10,
"pagination_token": "",
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("user_ids", ["12345", "67890"]),
("usernames_", ["testuser1", "testuser2"]),
],
test_mock={
"get_blocked_users": lambda *args, **kwargs: (
{}, # included
{}, # meta
["12345", "67890"], # user_ids
["testuser1", "testuser2"], # usernames
None, # next_token
)
},
)
@staticmethod
def get_blocked_users(
credentials: TwitterCredentials,
max_results: int | None,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"user_auth": False,
}
params = (
UserExpansionsBuilder(params)
.add_expansions(expansions)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_blocked(**params))
meta = {}
user_ids = []
usernames = []
next_token = None
included = IncludesSerializer.serialize(response.includes)
if response.data:
for user in response.data:
user_ids.append(str(user.id))
usernames.append(user.username)
if response.meta:
meta = response.meta
if "next_token" in meta:
next_token = meta["next_token"]
if user_ids and usernames:
return included, meta, user_ids, usernames, next_token
else:
raise tweepy.TweepyException("No blocked users found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
included, meta, user_ids, usernames, next_token = self.get_blocked_users(
credentials,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.tweet_fields,
input_data.user_fields,
)
if user_ids:
yield "user_ids", user_ids
if usernames:
yield "usernames_", usernames
if included:
yield "included", included
if meta:
yield "meta", meta
if next_token:
yield "next_token", next_token
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterBlockUserBlock(Block):
"""
Block a specific user on Twitter
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["block.write", "users.read", "offline.access"]
)
target_user_id: str = SchemaField(
description="The user ID of the user that you would like to block",
placeholder="Enter target user ID",
)
class Output(BlockSchema):
success: bool = SchemaField(description="Whether the block was successful")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="fc258b94-a630-11ef-abc3-df050b75b816",
description="This block blocks a specific user on Twitter.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterBlockUserBlock.Input,
output_schema=TwitterBlockUserBlock.Output,
test_input={
"target_user_id": "12345",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"block_user": lambda *args, **kwargs: True},
)
@staticmethod
def block_user(credentials: TwitterCredentials, target_user_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.block(target_user_id=target_user_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.block_user(credentials, input_data.target_user_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@ -0,0 +1,510 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import UserExpansionsBuilder
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
TweetFieldsFilter,
TweetUserFieldsFilter,
UserExpansionInputs,
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterUnfollowUserBlock(Block):
"""
Allows a user to unfollow another user specified by target user ID.
The request succeeds with no action when the authenticated user sends a request to a user they're not following or have already unfollowed.
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["users.read", "users.write", "follows.write", "offline.access"]
)
target_user_id: str = SchemaField(
description="The user ID of the user that you would like to unfollow",
placeholder="Enter target user ID",
)
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the unfollow action was successful"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="37e386a4-a631-11ef-b7bd-b78204b35fa4",
description="This block unfollows a specified Twitter user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterUnfollowUserBlock.Input,
output_schema=TwitterUnfollowUserBlock.Output,
test_input={
"target_user_id": "12345",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"unfollow_user": lambda *args, **kwargs: True},
)
@staticmethod
def unfollow_user(credentials: TwitterCredentials, target_user_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.unfollow_user(target_user_id=target_user_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.unfollow_user(credentials, input_data.target_user_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterFollowUserBlock(Block):
"""
Allows a user to follow another user specified by target user ID. If the target user does not have public Tweets,
this endpoint will send a follow request. The request succeeds with no action when the authenticated user sends a
request to a user they're already following, or if they're sending a follow request to a user that does not have
public Tweets.
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["users.read", "users.write", "follows.write", "offline.access"]
)
target_user_id: str = SchemaField(
description="The user ID of the user that you would like to follow",
placeholder="Enter target user ID",
)
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the follow action was successful"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="1aae6a5e-a631-11ef-a090-435900c6d429",
description="This block follows a specified Twitter user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterFollowUserBlock.Input,
output_schema=TwitterFollowUserBlock.Output,
test_input={
"target_user_id": "12345",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[("success", True)],
test_mock={"follow_user": lambda *args, **kwargs: True},
)
@staticmethod
def follow_user(credentials: TwitterCredentials, target_user_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.follow_user(target_user_id=target_user_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.follow_user(credentials, input_data.target_user_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetFollowersBlock(Block):
"""
Retrieves a list of followers for a specified Twitter user ID
"""
class Input(UserExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["users.read", "offline.access", "follows.read"]
)
target_user_id: str = SchemaField(
description="The user ID whose followers you would like to retrieve",
placeholder="Enter target user ID",
)
max_results: int | None = SchemaField(
description="Maximum number of results to return (1-1000, default 100)",
placeholder="Enter max results",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for retrieving next/previous page of results",
placeholder="Enter pagination token",
default="",
advanced=True,
)
class Output(BlockSchema):
ids: list[str] = SchemaField(description="List of follower user IDs")
usernames: list[str] = SchemaField(description="List of follower usernames")
next_token: str = SchemaField(description="Next token for pagination")
data: list[dict] = SchemaField(description="Complete user data for followers")
includes: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(description="Metadata including pagination info")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="30f66410-a631-11ef-8fe7-d7f888b4f43c",
description="This block retrieves followers of a specified Twitter user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetFollowersBlock.Input,
output_schema=TwitterGetFollowersBlock.Output,
test_input={
"target_user_id": "12345",
"max_results": 1,
"pagination_token": "",
"expansions": None,
"tweet_fields": None,
"user_fields": None,
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["1234567890"]),
("usernames", ["testuser"]),
("data", [{"id": "1234567890", "username": "testuser"}]),
],
test_mock={
"get_followers": lambda *args, **kwargs: (
["1234567890"],
["testuser"],
[{"id": "1234567890", "username": "testuser"}],
{},
{},
None,
)
},
)
@staticmethod
def get_followers(
credentials: TwitterCredentials,
target_user_id: str,
max_results: int | None,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": target_user_id,
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"user_auth": False,
}
params = (
UserExpansionsBuilder(params)
.add_expansions(expansions)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_users_followers(**params))
meta = {}
follower_ids = []
follower_usernames = []
next_token = None
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
follower_ids = [str(user.id) for user in response.data]
follower_usernames = [user.username for user in response.data]
return (
follower_ids,
follower_usernames,
data,
included,
meta,
next_token,
)
raise Exception("Followers not found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, usernames, data, includes, meta, next_token = self.get_followers(
credentials,
input_data.target_user_id,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.tweet_fields,
input_data.user_fields,
)
if ids:
yield "ids", ids
if usernames:
yield "usernames", usernames
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if includes:
yield "includes", includes
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetFollowingBlock(Block):
"""
Retrieves a list of users that a specified Twitter user ID is following
"""
class Input(UserExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["users.read", "offline.access", "follows.read"]
)
target_user_id: str = SchemaField(
description="The user ID whose following you would like to retrieve",
placeholder="Enter target user ID",
)
max_results: int | None = SchemaField(
description="Maximum number of results to return (1-1000, default 100)",
placeholder="Enter max results",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token for retrieving next/previous page of results",
placeholder="Enter pagination token",
default="",
advanced=True,
)
class Output(BlockSchema):
ids: list[str] = SchemaField(description="List of following user IDs")
usernames: list[str] = SchemaField(description="List of following usernames")
next_token: str = SchemaField(description="Next token for pagination")
data: list[dict] = SchemaField(description="Complete user data for following")
includes: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(description="Metadata including pagination info")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="264a399c-a631-11ef-a97d-bfde4ca91173",
description="This block retrieves the users that a specified Twitter user is following.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetFollowingBlock.Input,
output_schema=TwitterGetFollowingBlock.Output,
test_input={
"target_user_id": "12345",
"max_results": 1,
"pagination_token": None,
"expansions": None,
"tweet_fields": None,
"user_fields": None,
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["1234567890"]),
("usernames", ["testuser"]),
("data", [{"id": "1234567890", "username": "testuser"}]),
],
test_mock={
"get_following": lambda *args, **kwargs: (
["1234567890"],
["testuser"],
[{"id": "1234567890", "username": "testuser"}],
{},
{},
None,
)
},
)
@staticmethod
def get_following(
credentials: TwitterCredentials,
target_user_id: str,
max_results: int | None,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": target_user_id,
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"user_auth": False,
}
params = (
UserExpansionsBuilder(params)
.add_expansions(expansions)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_users_following(**params))
meta = {}
following_ids = []
following_usernames = []
next_token = None
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
following_ids = [str(user.id) for user in response.data]
following_usernames = [user.username for user in response.data]
return (
following_ids,
following_usernames,
data,
included,
meta,
next_token,
)
raise Exception("Following not found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, usernames, data, includes, meta, next_token = self.get_following(
credentials,
input_data.target_user_id,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.tweet_fields,
input_data.user_fields,
)
if ids:
yield "ids", ids
if usernames:
yield "usernames", usernames
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if includes:
yield "includes", includes
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@ -0,0 +1,328 @@
from typing import cast
import tweepy
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import UserExpansionsBuilder
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
TweetFieldsFilter,
TweetUserFieldsFilter,
UserExpansionInputs,
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class TwitterUnmuteUserBlock(Block):
"""
Allows a user to unmute another user specified by target user ID.
The request succeeds with no action when the user sends a request to a user they're not muting or have already unmuted.
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["users.read", "users.write", "offline.access"]
)
target_user_id: str = SchemaField(
description="The user ID of the user that you would like to unmute",
placeholder="Enter target user ID",
)
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the unmute action was successful"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="40458504-a631-11ef-940b-eff92be55422",
description="This block unmutes a specified Twitter user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterUnmuteUserBlock.Input,
output_schema=TwitterUnmuteUserBlock.Output,
test_input={
"target_user_id": "12345",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"unmute_user": lambda *args, **kwargs: True},
)
@staticmethod
def unmute_user(credentials: TwitterCredentials, target_user_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.unmute(target_user_id=target_user_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.unmute_user(credentials, input_data.target_user_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterGetMutedUsersBlock(Block):
"""
Returns a list of users who are muted by the authenticating user
"""
class Input(UserExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["users.read", "offline.access"]
)
max_results: int | None = SchemaField(
description="The maximum number of results to be returned per page (1-1000). Default is 100.",
placeholder="Enter max results",
default=10,
advanced=True,
)
pagination_token: str | None = SchemaField(
description="Token to request next/previous page of results",
placeholder="Enter pagination token",
default="",
advanced=True,
)
class Output(BlockSchema):
ids: list[str] = SchemaField(description="List of muted user IDs")
usernames: list[str] = SchemaField(description="List of muted usernames")
next_token: str = SchemaField(description="Next token for pagination")
data: list[dict] = SchemaField(description="Complete user data for muted users")
includes: dict = SchemaField(
description="Additional data requested via expansions"
)
meta: dict = SchemaField(description="Metadata including pagination info")
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="475024da-a631-11ef-9ccd-f724b8b03cda",
description="This block gets a list of users muted by the authenticating user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetMutedUsersBlock.Input,
output_schema=TwitterGetMutedUsersBlock.Output,
test_input={
"max_results": 2,
"pagination_token": "",
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["12345", "67890"]),
("usernames", ["testuser1", "testuser2"]),
(
"data",
[
{"id": "12345", "username": "testuser1"},
{"id": "67890", "username": "testuser2"},
],
),
],
test_mock={
"get_muted_users": lambda *args, **kwargs: (
["12345", "67890"],
["testuser1", "testuser2"],
[
{"id": "12345", "username": "testuser1"},
{"id": "67890", "username": "testuser2"},
],
{},
{},
None,
)
},
)
@staticmethod
def get_muted_users(
credentials: TwitterCredentials,
max_results: int | None,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"max_results": max_results,
"pagination_token": (
None if pagination_token == "" else pagination_token
),
"user_auth": False,
}
params = (
UserExpansionsBuilder(params)
.add_expansions(expansions)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_muted(**params))
meta = {}
user_ids = []
usernames = []
next_token = None
if response.meta:
meta = response.meta
next_token = meta.get("next_token")
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
user_ids = [str(item.id) for item in response.data]
usernames = [item.username for item in response.data]
return user_ids, usernames, data, included, meta, next_token
raise Exception("Muted users not found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
ids, usernames, data, includes, meta, next_token = self.get_muted_users(
credentials,
input_data.max_results,
input_data.pagination_token,
input_data.expansions,
input_data.tweet_fields,
input_data.user_fields,
)
if ids:
yield "ids", ids
if usernames:
yield "usernames", usernames
if next_token:
yield "next_token", next_token
if data:
yield "data", data
if includes:
yield "includes", includes
if meta:
yield "meta", meta
except Exception as e:
yield "error", handle_tweepy_exception(e)
class TwitterMuteUserBlock(Block):
"""
Allows a user to mute another user specified by target user ID
"""
class Input(BlockSchema):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["users.read", "users.write", "offline.access"]
)
target_user_id: str = SchemaField(
description="The user ID of the user that you would like to mute",
placeholder="Enter target user ID",
)
class Output(BlockSchema):
success: bool = SchemaField(
description="Whether the mute action was successful"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="4d1919d0-a631-11ef-90ab-3b73af9ce8f1",
description="This block mutes a specified Twitter user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterMuteUserBlock.Input,
output_schema=TwitterMuteUserBlock.Output,
test_input={
"target_user_id": "12345",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("success", True),
],
test_mock={"mute_user": lambda *args, **kwargs: True},
)
@staticmethod
def mute_user(credentials: TwitterCredentials, target_user_id: str):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
client.mute(target_user_id=target_user_id, user_auth=False)
return True
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
success = self.mute_user(credentials, input_data.target_user_id)
yield "success", success
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@ -0,0 +1,383 @@
from typing import Literal, Union, cast
import tweepy
from pydantic import BaseModel
from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import UserExpansionsBuilder
from backend.blocks.twitter._serializer import (
IncludesSerializer,
ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
TweetFieldsFilter,
TweetUserFieldsFilter,
UserExpansionInputs,
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class UserId(BaseModel):
discriminator: Literal["user_id"]
user_id: str = SchemaField(description="The ID of the user to lookup", default="")
class Username(BaseModel):
discriminator: Literal["username"]
username: str = SchemaField(
description="The Twitter username (handle) of the user", default=""
)
class TwitterGetUserBlock(Block):
"""
Gets information about a single Twitter user specified by ID or username
"""
class Input(UserExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["users.read", "offline.access"]
)
identifier: Union[UserId, Username] = SchemaField(
discriminator="discriminator",
description="Choose whether to identify the user by their unique Twitter ID or by their username",
advanced=False,
)
class Output(BlockSchema):
# Common outputs
id: str = SchemaField(description="User ID")
username_: str = SchemaField(description="User username")
name_: str = SchemaField(description="User name")
# Complete outputs
data: dict = SchemaField(description="Complete user data")
included: dict = SchemaField(
description="Additional data requested via expansions"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="5446db8e-a631-11ef-812a-cf315d373ee9",
description="This block retrieves information about a specified Twitter user.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetUserBlock.Input,
output_schema=TwitterGetUserBlock.Output,
test_input={
"identifier": {"discriminator": "username", "username": "twitter"},
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("id", "783214"),
("username_", "twitter"),
("name_", "Twitter"),
(
"data",
{
"user": {
"id": "783214",
"username": "twitter",
"name": "Twitter",
}
},
),
],
test_mock={
"get_user": lambda *args, **kwargs: (
{
"user": {
"id": "783214",
"username": "twitter",
"name": "Twitter",
}
},
{},
"twitter",
"783214",
"Twitter",
)
},
)
@staticmethod
def get_user(
credentials: TwitterCredentials,
identifier: Union[UserId, Username],
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"id": identifier.user_id if isinstance(identifier, UserId) else None,
"username": (
identifier.username if isinstance(identifier, Username) else None
),
"user_auth": False,
}
params = (
UserExpansionsBuilder(params)
.add_expansions(expansions)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_user(**params))
username = ""
id = ""
name = ""
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_dict(response.data)
if response.data:
username = response.data.username
id = str(response.data.id)
name = response.data.name
if username and id:
return data, included, username, id, name
else:
raise tweepy.TweepyException("User not found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
data, included, username, id, name = self.get_user(
credentials,
input_data.identifier,
input_data.expansions,
input_data.tweet_fields,
input_data.user_fields,
)
if id:
yield "id", id
if username:
yield "username_", username
if name:
yield "name_", name
if data:
yield "data", data
if included:
yield "included", included
except Exception as e:
yield "error", handle_tweepy_exception(e)
class UserIdList(BaseModel):
discriminator: Literal["user_id_list"]
user_ids: list[str] = SchemaField(
description="List of user IDs to lookup (max 100)",
placeholder="Enter user IDs",
default=[],
advanced=False,
)
class UsernameList(BaseModel):
discriminator: Literal["username_list"]
usernames: list[str] = SchemaField(
description="List of Twitter usernames/handles to lookup (max 100)",
placeholder="Enter usernames",
default=[],
advanced=False,
)
class TwitterGetUsersBlock(Block):
"""
Gets information about multiple Twitter users specified by IDs or usernames
"""
class Input(UserExpansionInputs):
credentials: TwitterCredentialsInput = TwitterCredentialsField(
["users.read", "offline.access"]
)
identifier: Union[UserIdList, UsernameList] = SchemaField(
discriminator="discriminator",
description="Choose whether to identify users by their unique Twitter IDs or by their usernames",
advanced=False,
)
class Output(BlockSchema):
# Common outputs
ids: list[str] = SchemaField(description="User IDs")
usernames_: list[str] = SchemaField(description="User usernames")
names_: list[str] = SchemaField(description="User names")
# Complete outputs
data: list[dict] = SchemaField(description="Complete users data")
included: dict = SchemaField(
description="Additional data requested via expansions"
)
error: str = SchemaField(description="Error message if the request failed")
def __init__(self):
super().__init__(
id="5abc857c-a631-11ef-8cfc-f7b79354f7a1",
description="This block retrieves information about multiple Twitter users.",
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetUsersBlock.Input,
output_schema=TwitterGetUsersBlock.Output,
test_input={
"identifier": {
"discriminator": "username_list",
"usernames": ["twitter", "twitterdev"],
},
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"tweet_fields": None,
"user_fields": None,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("ids", ["783214", "2244994945"]),
("usernames_", ["twitter", "twitterdev"]),
("names_", ["Twitter", "Twitter Dev"]),
(
"data",
[
{"id": "783214", "username": "twitter", "name": "Twitter"},
{
"id": "2244994945",
"username": "twitterdev",
"name": "Twitter Dev",
},
],
),
],
test_mock={
"get_users": lambda *args, **kwargs: (
[
{"id": "783214", "username": "twitter", "name": "Twitter"},
{
"id": "2244994945",
"username": "twitterdev",
"name": "Twitter Dev",
},
],
{},
["twitter", "twitterdev"],
["783214", "2244994945"],
["Twitter", "Twitter Dev"],
)
},
)
@staticmethod
def get_users(
credentials: TwitterCredentials,
identifier: Union[UserIdList, UsernameList],
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
):
try:
client = tweepy.Client(
bearer_token=credentials.access_token.get_secret_value()
)
params = {
"ids": (
",".join(identifier.user_ids)
if isinstance(identifier, UserIdList)
else None
),
"usernames": (
",".join(identifier.usernames)
if isinstance(identifier, UsernameList)
else None
),
"user_auth": False,
}
params = (
UserExpansionsBuilder(params)
.add_expansions(expansions)
.add_tweet_fields(tweet_fields)
.add_user_fields(user_fields)
.build()
)
response = cast(Response, client.get_users(**params))
usernames = []
ids = []
names = []
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
for user in response.data:
usernames.append(user.username)
ids.append(str(user.id))
names.append(user.name)
if usernames and ids:
return data, included, usernames, ids, names
else:
raise tweepy.TweepyException("Users not found")
except tweepy.TweepyException:
raise
def run(
self,
input_data: Input,
*,
credentials: TwitterCredentials,
**kwargs,
) -> BlockOutput:
try:
data, included, usernames, ids, names = self.get_users(
credentials,
input_data.identifier,
input_data.expansions,
input_data.tweet_fields,
input_data.user_fields,
)
if ids:
yield "ids", ids
if usernames:
yield "usernames_", usernames
if names:
yield "names_", names
if data:
yield "data", data
if included:
yield "included", included
except Exception as e:
yield "error", handle_tweepy_exception(e)

View File

@ -61,6 +61,9 @@ class BlockCategory(Enum):
HARDWARE = "Block that interacts with hardware."
AGENT = "Block that interacts with other agents."
CRM = "Block that interacts with CRM services."
SAFETY = (
"Block that provides AI safety mechanisms such as detecting harmful content"
)
def dict(self) -> dict[str, str]:
return {"category": self.name, "description": self.value}

View File

@ -629,25 +629,20 @@ async def __create_graph(tx, graph: Graph, user_id: str):
"isTemplate": graph.is_template,
"isActive": graph.is_active,
"userId": user_id,
"AgentNodes": {
"create": [
{
"id": node.id,
"agentBlockId": node.block_id,
"constantInput": json.dumps(node.input_default),
"metadata": json.dumps(node.metadata),
}
for node in graph.nodes
]
},
}
)
await asyncio.gather(
*[
AgentNode.prisma(tx).create(
{
"id": node.id,
"agentBlockId": node.block_id,
"agentGraphId": graph.id,
"agentGraphVersion": graph.version,
"constantInput": json.dumps(node.input_default),
"metadata": json.dumps(node.metadata),
}
)
for node in graph.nodes
]
)
await asyncio.gather(
*[
AgentNodeLink.prisma(tx).create(

View File

@ -140,6 +140,8 @@ def SchemaField(
exclude: bool = False,
hidden: Optional[bool] = None,
depends_on: list[str] | None = None,
image_upload: Optional[bool] = None,
image_output: Optional[bool] = None,
**kwargs,
) -> T:
if default is PydanticUndefined and default_factory is None:
@ -155,6 +157,8 @@ def SchemaField(
"advanced": advanced,
"hidden": hidden,
"depends_on": depends_on,
"image_upload": image_upload,
"image_output": image_output,
}.items()
if v is not None
}
@ -238,6 +242,7 @@ class OAuthState(BaseModel):
token: str
provider: str
expires_at: int
"""Unix timestamp (seconds) indicating when this OAuth state expires"""
code_verifier: Optional[str] = None
scopes: list[str]

View File

@ -1,6 +1,8 @@
import base64
import hashlib
import secrets
from datetime import datetime, timedelta, timezone
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Optional
from pydantic import SecretStr
@ -210,18 +212,24 @@ class IntegrationCredentialsStore:
]
self._set_user_integration_creds(user_id, filtered_credentials)
def store_state_token(self, user_id: str, provider: str, scopes: list[str]) -> str:
def store_state_token(
self, user_id: str, provider: str, scopes: list[str], use_pkce: bool = False
) -> tuple[str, str]:
token = secrets.token_urlsafe(32)
expires_at = datetime.now(timezone.utc) + timedelta(minutes=10)
(code_challenge, code_verifier) = self._generate_code_challenge()
state = OAuthState(
token=token,
provider=provider,
code_verifier=code_verifier,
expires_at=int(expires_at.timestamp()),
scopes=scopes,
)
with self.locked_user_integrations(user_id):
user_integrations = self._get_user_integrations(user_id)
oauth_states = user_integrations.oauth_states
oauth_states.append(state)
@ -231,39 +239,21 @@ class IntegrationCredentialsStore:
user_id=user_id, data=user_integrations
)
return token
return token, code_challenge
def get_any_valid_scopes_from_state_token(
def _generate_code_challenge(self) -> tuple[str, str]:
"""
Generate a code challenge from the code verifier using SHA256.
Currently only SHA256 is supported. (If we want to support more methods in the future, they can be added here.)
"""
code_verifier = secrets.token_urlsafe(128)
sha256_hash = hashlib.sha256(code_verifier.encode("utf-8")).digest()
code_challenge = base64.urlsafe_b64encode(sha256_hash).decode("utf-8")
return code_challenge.replace("=", ""), code_verifier
def verify_state_token(
self, user_id: str, token: str, provider: str
) -> list[str]:
"""
Get the valid scopes from the OAuth state token. This will return any valid scopes
from any OAuth state token for the given provider. If no valid scopes are found,
an empty list is returned. DO NOT RELY ON THIS TOKEN TO AUTHENTICATE A USER, AS IT
IS TO CHECK IF THE USER HAS GIVEN PERMISSIONS TO THE APPLICATION BEFORE EXCHANGING
THE CODE FOR TOKENS.
"""
user_integrations = self._get_user_integrations(user_id)
oauth_states = user_integrations.oauth_states
now = datetime.now(timezone.utc)
valid_state = next(
(
state
for state in oauth_states
if state.token == token
and state.provider == provider
and state.expires_at > now.timestamp()
),
None,
)
if valid_state:
return valid_state.scopes
return []
def verify_state_token(self, user_id: str, token: str, provider: str) -> bool:
) -> Optional[OAuthState]:
with self.locked_user_integrations(user_id):
user_integrations = self._get_user_integrations(user_id)
oauth_states = user_integrations.oauth_states
@ -285,9 +275,9 @@ class IntegrationCredentialsStore:
oauth_states.remove(valid_state)
user_integrations.oauth_states = oauth_states
self.db_manager.update_user_integrations(user_id, user_integrations)
return True
return valid_state
return False
return None
def _set_user_integration_creds(
self, user_id: str, credentials: list[Credentials]

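The store changes above implement PKCE (RFC 7636, S256): store_state_token now keeps the code_verifier alongside the state token and returns the derived code_challenge for the login URL, and verify_state_token hands the whole OAuthState back so the callback can retrieve the verifier. A minimal, standard-library sketch of the S256 derivation these methods rely on (the helper name is illustrative, not part of the store's API):
# S256 PKCE derivation sketch, standard library only; make_pkce_pair is an illustrative name.
import base64
import hashlib
import secrets

def make_pkce_pair() -> tuple[str, str]:
    # High-entropy URL-safe verifier; challenge is base64url(sha256(verifier)) with padding stripped.
    code_verifier = secrets.token_urlsafe(128)
    digest = hashlib.sha256(code_verifier.encode("utf-8")).digest()
    code_challenge = base64.urlsafe_b64encode(digest).decode("utf-8").rstrip("=")
    return code_challenge, code_verifier

code_challenge, code_verifier = make_pkce_pair()
# The challenge goes into the authorization URL (code_challenge_method=S256);
# the verifier stays server-side and is sent later in the token exchange.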
View File

@ -3,6 +3,7 @@ from typing import TYPE_CHECKING
from .github import GitHubOAuthHandler
from .google import GoogleOAuthHandler
from .notion import NotionOAuthHandler
from .twitter import TwitterOAuthHandler
if TYPE_CHECKING:
from ..providers import ProviderName
@ -15,6 +16,7 @@ HANDLERS_BY_NAME: dict["ProviderName", type["BaseOAuthHandler"]] = {
GitHubOAuthHandler,
GoogleOAuthHandler,
NotionOAuthHandler,
TwitterOAuthHandler,
]
}
# --8<-- [end:HANDLERS_BY_NAMEExample]

View File

@ -1,7 +1,7 @@
import logging
import time
from abc import ABC, abstractmethod
from typing import ClassVar
from typing import ClassVar, Optional
from backend.data.model import OAuth2Credentials
from backend.integrations.providers import ProviderName
@ -23,7 +23,9 @@ class BaseOAuthHandler(ABC):
@abstractmethod
# --8<-- [start:BaseOAuthHandler3]
def get_login_url(self, scopes: list[str], state: str) -> str:
def get_login_url(
self, scopes: list[str], state: str, code_challenge: Optional[str]
) -> str:
# --8<-- [end:BaseOAuthHandler3]
"""Constructs a login URL that the user can be redirected to"""
...
@ -31,7 +33,7 @@ class BaseOAuthHandler(ABC):
@abstractmethod
# --8<-- [start:BaseOAuthHandler4]
def exchange_code_for_tokens(
self, code: str, scopes: list[str]
self, code: str, scopes: list[str], code_verifier: Optional[str]
) -> OAuth2Credentials:
# --8<-- [end:BaseOAuthHandler4]
"""Exchanges the acquired authorization code from login for a set of tokens"""

View File

@ -34,7 +34,9 @@ class GitHubOAuthHandler(BaseOAuthHandler):
self.token_url = "https://github.com/login/oauth/access_token"
self.revoke_url = "https://api.github.com/applications/{client_id}/token"
def get_login_url(self, scopes: list[str], state: str) -> str:
def get_login_url(
self, scopes: list[str], state: str, code_challenge: Optional[str]
) -> str:
params = {
"client_id": self.client_id,
"redirect_uri": self.redirect_uri,
@ -44,7 +46,7 @@ class GitHubOAuthHandler(BaseOAuthHandler):
return f"{self.auth_base_url}?{urlencode(params)}"
def exchange_code_for_tokens(
self, code: str, scopes: list[str]
self, code: str, scopes: list[str], code_verifier: Optional[str]
) -> OAuth2Credentials:
return self._request_tokens({"code": code, "redirect_uri": self.redirect_uri})

View File

@ -1,4 +1,5 @@
import logging
from typing import Optional
from google.auth.external_account_authorized_user import (
Credentials as ExternalAccountCredentials,
@ -38,7 +39,9 @@ class GoogleOAuthHandler(BaseOAuthHandler):
self.token_uri = "https://oauth2.googleapis.com/token"
self.revoke_uri = "https://oauth2.googleapis.com/revoke"
def get_login_url(self, scopes: list[str], state: str) -> str:
def get_login_url(
self, scopes: list[str], state: str, code_challenge: Optional[str]
) -> str:
all_scopes = list(set(scopes + self.DEFAULT_SCOPES))
logger.debug(f"Setting up OAuth flow with scopes: {all_scopes}")
flow = self._setup_oauth_flow(all_scopes)
@ -52,7 +55,7 @@ class GoogleOAuthHandler(BaseOAuthHandler):
return authorization_url
def exchange_code_for_tokens(
self, code: str, scopes: list[str]
self, code: str, scopes: list[str], code_verifier: Optional[str]
) -> OAuth2Credentials:
logger.debug(f"Exchanging code for tokens with scopes: {scopes}")

View File

@ -1,4 +1,5 @@
from base64 import b64encode
from typing import Optional
from urllib.parse import urlencode
from backend.data.model import OAuth2Credentials
@ -26,7 +27,9 @@ class NotionOAuthHandler(BaseOAuthHandler):
self.auth_base_url = "https://api.notion.com/v1/oauth/authorize"
self.token_url = "https://api.notion.com/v1/oauth/token"
def get_login_url(self, scopes: list[str], state: str) -> str:
def get_login_url(
self, scopes: list[str], state: str, code_challenge: Optional[str]
) -> str:
params = {
"client_id": self.client_id,
"redirect_uri": self.redirect_uri,
@ -37,7 +40,7 @@ class NotionOAuthHandler(BaseOAuthHandler):
return f"{self.auth_base_url}?{urlencode(params)}"
def exchange_code_for_tokens(
self, code: str, scopes: list[str]
self, code: str, scopes: list[str], code_verifier: Optional[str]
) -> OAuth2Credentials:
request_body = {
"grant_type": "authorization_code",

View File

@ -0,0 +1,171 @@
import time
import urllib.parse
from typing import ClassVar, Optional
import requests
from backend.data.model import OAuth2Credentials, ProviderName
from backend.integrations.oauth.base import BaseOAuthHandler
class TwitterOAuthHandler(BaseOAuthHandler):
PROVIDER_NAME = ProviderName.TWITTER
DEFAULT_SCOPES: ClassVar[list[str]] = [
"tweet.read",
"tweet.write",
"tweet.moderate.write",
"users.read",
"follows.read",
"follows.write",
"offline.access",
"space.read",
"mute.read",
"mute.write",
"like.read",
"like.write",
"list.read",
"list.write",
"block.read",
"block.write",
"bookmark.read",
"bookmark.write",
]
AUTHORIZE_URL = "https://twitter.com/i/oauth2/authorize"
TOKEN_URL = "https://api.x.com/2/oauth2/token"
USERNAME_URL = "https://api.x.com/2/users/me"
REVOKE_URL = "https://api.x.com/2/oauth2/revoke"
def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
self.client_id = client_id
self.client_secret = client_secret
self.redirect_uri = redirect_uri
def get_login_url(
self, scopes: list[str], state: str, code_challenge: Optional[str]
) -> str:
"""Generate Twitter OAuth 2.0 authorization URL"""
# scopes = self.handle_default_scopes(scopes)
if code_challenge is None:
raise ValueError("code_challenge is required for Twitter OAuth")
params = {
"response_type": "code",
"client_id": self.client_id,
"redirect_uri": self.redirect_uri,
"scope": " ".join(self.DEFAULT_SCOPES),
"state": state,
"code_challenge": code_challenge,
"code_challenge_method": "S256",
}
return f"{self.AUTHORIZE_URL}?{urllib.parse.urlencode(params)}"
def exchange_code_for_tokens(
self, code: str, scopes: list[str], code_verifier: Optional[str]
) -> OAuth2Credentials:
"""Exchange authorization code for access tokens"""
headers = {"Content-Type": "application/x-www-form-urlencoded"}
data = {
"code": code,
"grant_type": "authorization_code",
"redirect_uri": self.redirect_uri,
"code_verifier": code_verifier,
}
auth = (self.client_id, self.client_secret)
response = requests.post(self.TOKEN_URL, headers=headers, data=data, auth=auth)
response.raise_for_status()
tokens = response.json()
username = self._get_username(tokens["access_token"])
return OAuth2Credentials(
provider=self.PROVIDER_NAME,
title=None,
username=username,
access_token=tokens["access_token"],
refresh_token=tokens.get("refresh_token"),
access_token_expires_at=int(time.time()) + tokens["expires_in"],
refresh_token_expires_at=None,
scopes=scopes,
)
def _get_username(self, access_token: str) -> str:
"""Get the username from the access token"""
headers = {"Authorization": f"Bearer {access_token}"}
params = {"user.fields": "username"}
response = requests.get(
f"{self.USERNAME_URL}?{urllib.parse.urlencode(params)}", headers=headers
)
response.raise_for_status()
return response.json()["data"]["username"]
def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
"""Refresh access tokens using refresh token"""
if not credentials.refresh_token:
raise ValueError("No refresh token available")
header = {"Content-Type": "application/x-www-form-urlencoded"}
data = {
"grant_type": "refresh_token",
"refresh_token": credentials.refresh_token.get_secret_value(),
}
auth = (self.client_id, self.client_secret)
response = requests.post(self.TOKEN_URL, headers=header, data=data, auth=auth)
try:
response.raise_for_status()
except requests.exceptions.HTTPError as e:
print("HTTP Error:", e)
print("Response Content:", response.text)
raise
tokens = response.json()
username = self._get_username(tokens["access_token"])
return OAuth2Credentials(
id=credentials.id,
provider=self.PROVIDER_NAME,
title=None,
username=username,
access_token=tokens["access_token"],
refresh_token=tokens["refresh_token"],
access_token_expires_at=int(time.time()) + tokens["expires_in"],
scopes=credentials.scopes,
refresh_token_expires_at=None,
)
def revoke_tokens(self, credentials: OAuth2Credentials) -> bool:
"""Revoke the access token"""
header = {"Content-Type": "application/x-www-form-urlencoded"}
data = {
"token": credentials.access_token.get_secret_value(),
"token_type_hint": "access_token",
}
auth = (self.client_id, self.client_secret)
response = requests.post(self.REVOKE_URL, headers=header, data=data, auth=auth)
try:
response.raise_for_status()
except requests.exceptions.HTTPError as e:
print("HTTP Error:", e)
print("Response Content:", response.text)
raise
return response.status_code == 200
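Putting the pieces together, the handler is used roughly as sketched below. This is a usage sketch only: the client credentials, redirect URI, state token, and authorization code are placeholders, and the real flow persists the verifier via the credentials store rather than a local variable.
# Usage sketch with placeholder values; the real flow stores code_verifier in the credentials store.
import base64
import hashlib
import secrets

from backend.integrations.oauth.twitter import TwitterOAuthHandler

code_verifier = secrets.token_urlsafe(128)
code_challenge = (
    base64.urlsafe_b64encode(hashlib.sha256(code_verifier.encode()).digest())
    .decode()
    .rstrip("=")
)

handler = TwitterOAuthHandler(
    client_id="<twitter-client-id>",
    client_secret="<twitter-client-secret>",
    redirect_uri="https://example.com/auth/integrations/oauth_callback",
)

# 1) Redirect the user to the authorization URL carrying the S256 challenge.
login_url = handler.get_login_url(scopes=[], state="<state-token>", code_challenge=code_challenge)

# 2) On callback, exchange the returned code, supplying the stored verifier.
# credentials = handler.exchange_code_for_tokens("<auth-code>", [], code_verifier)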

View File

@ -19,6 +19,7 @@ class ProviderName(str, Enum):
JINA = "jina"
MEDIUM = "medium"
NOTION = "notion"
NVIDIA = "nvidia"
OLLAMA = "ollama"
OPENAI = "openai"
OPENWEATHERMAP = "openweathermap"
@ -28,5 +29,6 @@ class ProviderName(str, Enum):
REPLICATE = "replicate"
REVID = "revid"
SLANT3D = "slant3d"
TWITTER = "twitter"
UNREAL_SPEECH = "unreal_speech"
# --8<-- [end:ProviderName]

View File

@ -7,6 +7,6 @@ app_config = Config()
# TODO: add test to assert this matches the actual API route
def webhook_ingress_url(provider_name: ProviderName, webhook_id: str) -> str:
return (
f"{app_config.platform_base_url}/api/integrations/{provider_name}"
f"{app_config.platform_base_url}/api/integrations/{provider_name.value}"
f"/webhooks/{webhook_id}/ingress"
)
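The one-character change above matters because ProviderName mixes str into Enum: on recent Python versions, interpolating the member directly into an f-string renders the member name (e.g. ProviderName.GITHUB) instead of its value, which would produce a broken ingress URL. A small illustration (the exact output of the first line depends on the Python version):
# Why .value: formatting of a str-mixin Enum member varies by Python version.
from enum import Enum

class ProviderName(str, Enum):
    GITHUB = "github"

print(f"/api/integrations/{ProviderName.GITHUB}/webhooks/123/ingress")
# On newer Pythons: /api/integrations/ProviderName.GITHUB/webhooks/123/ingress
print(f"/api/integrations/{ProviderName.GITHUB.value}/webhooks/123/ingress")
# Always:           /api/integrations/github/webhooks/123/ingress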

View File

@ -55,11 +55,12 @@ def login(
requested_scopes = scopes.split(",") if scopes else []
# Generate and store a secure random state token along with the scopes
state_token = creds_manager.store.store_state_token(
state_token, code_challenge = creds_manager.store.store_state_token(
user_id, provider, requested_scopes
)
login_url = handler.get_login_url(requested_scopes, state_token)
login_url = handler.get_login_url(
requested_scopes, state_token, code_challenge=code_challenge
)
return LoginResponse(login_url=login_url, state_token=state_token)
@ -87,19 +88,21 @@ def callback(
handler = _get_provider_oauth_handler(request, provider)
# Verify the state token
if not creds_manager.store.verify_state_token(user_id, state_token, provider):
valid_state = creds_manager.store.verify_state_token(user_id, state_token, provider)
if not valid_state:
logger.warning(f"Invalid or expired state token for user {user_id}")
raise HTTPException(status_code=400, detail="Invalid or expired state token")
try:
scopes = creds_manager.store.get_any_valid_scopes_from_state_token(
user_id, state_token, provider
)
scopes = valid_state.scopes
logger.debug(f"Retrieved scopes from state token: {scopes}")
scopes = handler.handle_default_scopes(scopes)
credentials = handler.exchange_code_for_tokens(code, scopes)
credentials = handler.exchange_code_for_tokens(
code, scopes, valid_state.code_verifier
)
logger.debug(f"Received credentials with final scopes: {credentials.scopes}")
# Check if the granted scopes are sufficient for the requested scopes

View File

@ -541,7 +541,7 @@ def get_execution_schedules(
@v1_router.post(
"/api-keys",
response_model=list[CreateAPIKeyResponse] | dict[str, str],
response_model=CreateAPIKeyResponse,
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)
@ -583,7 +583,7 @@ async def get_api_keys(
@v1_router.get(
"/api-keys/{key_id}",
response_model=list[APIKeyWithoutHash] | dict[str, str],
response_model=APIKeyWithoutHash,
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)
@ -604,7 +604,7 @@ async def get_api_key(
@v1_router.delete(
"/api-keys/{key_id}",
response_model=list[APIKeyWithoutHash] | dict[str, str],
response_model=APIKeyWithoutHash,
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)
@ -626,7 +626,7 @@ async def delete_api_key(
@v1_router.post(
"/api-keys/{key_id}/suspend",
response_model=list[APIKeyWithoutHash] | dict[str, str],
response_model=APIKeyWithoutHash,
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)
@ -648,7 +648,7 @@ async def suspend_key(
@v1_router.put(
"/api-keys/{key_id}/permissions",
response_model=list[APIKeyWithoutHash] | dict[str, str],
response_model=APIKeyWithoutHash,
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)

View File

@ -269,6 +269,10 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
notion_client_secret: str = Field(
default="", description="Notion OAuth client secret"
)
twitter_client_id: str = Field(default="", description="Twitter/X OAuth client ID")
twitter_client_secret: str = Field(
default="", description="Twitter/X OAuth client secret"
)
openai_api_key: str = Field(default="", description="OpenAI API key")
anthropic_api_key: str = Field(default="", description="Anthropic API key")

File diff suppressed because it is too large

View File

@ -39,8 +39,9 @@ python-dotenv = "^1.0.1"
redis = "^5.2.0"
sentry-sdk = "2.19.2"
strenum = "^0.4.9"
supabase = "^2.10.0"
supabase = "2.11.0"
tenacity = "^9.0.0"
tweepy = "^4.14.0"
uvicorn = { extras = ["standard"], version = "^0.34.0" }
websockets = "^13.1"
youtube-transcript-api = "^0.6.2"

View File

@ -144,51 +144,6 @@ services:
networks:
- app-network
market:
build:
context: ../
dockerfile: autogpt_platform/market/Dockerfile
develop:
watch:
- path: ./
target: autogpt_platform/market/
action: rebuild
depends_on:
db:
condition: service_healthy
market-migrations:
condition: service_completed_successfully
environment:
- SUPABASE_URL=http://kong:8000
- SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
- SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
- DATABASE_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=market
- BACKEND_CORS_ALLOW_ORIGINS="http://localhost:3000,http://127.0.0.1:3000"
ports:
- "8015:8015"
networks:
- app-network
market-migrations:
build:
context: ../
dockerfile: autogpt_platform/market/Dockerfile
command: ["sh", "-c", "poetry run prisma migrate deploy"]
develop:
watch:
- path: ./
target: autogpt_platform/market/
action: rebuild
depends_on:
db:
condition: service_healthy
environment:
- SUPABASE_URL=http://kong:8000
- SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
- SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
- DATABASE_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=market
networks:
- app-network
# frontend:
# build:
# context: ../

View File

@ -51,18 +51,6 @@ services:
file: ./docker-compose.platform.yml
service: websocket_server
market:
<<: *agpt-services
extends:
file: ./docker-compose.platform.yml
service: market
market-migrations:
<<: *agpt-services
extends:
file: ./docker-compose.platform.yml
service: market-migrations
# frontend:
# <<: *agpt-services
# extends:

View File

@ -24,8 +24,8 @@
],
"dependencies": {
"@faker-js/faker": "^9.3.0",
"@hookform/resolvers": "^3.9.1",
"@next/third-parties": "^15.0.4",
"@hookform/resolvers": "^3.10.0",
"@next/third-parties": "^15.1.3",
"@radix-ui/react-alert-dialog": "^1.1.4",
"@radix-ui/react-avatar": "^1.1.1",
"@radix-ui/react-checkbox": "^1.1.2",
@ -59,25 +59,25 @@
"dotenv": "^16.4.7",
"elliptic": "6.6.1",
"embla-carousel-react": "^8.3.0",
"framer-motion": "^11.15.0",
"framer-motion": "^11.16.0",
"geist": "^1.3.1",
"launchdarkly-react-client-sdk": "^3.6.0",
"lucide-react": "^0.468.0",
"lucide-react": "^0.469.0",
"moment": "^2.30.1",
"next": "^14.2.13",
"next-themes": "^0.4.4",
"react": "^18",
"react-day-picker": "^9.4.4",
"react-day-picker": "^9.5.0",
"react-dom": "^18",
"react-hook-form": "^7.54.0",
"react-hook-form": "^7.54.2",
"react-icons": "^5.4.0",
"react-markdown": "^9.0.1",
"react-modal": "^3.16.1",
"react-markdown": "^9.0.3",
"react-modal": "^3.16.3",
"react-shepherd": "^6.1.6",
"recharts": "^2.14.1",
"tailwind-merge": "^2.5.5",
"tailwind-merge": "^2.6.0",
"tailwindcss-animate": "^1.0.7",
"uuid": "^11.0.3",
"uuid": "^11.0.4",
"zod": "^3.23.8"
},
"devDependencies": {
@ -94,16 +94,16 @@
"@storybook/test": "^8.3.5",
"@storybook/test-runner": "^0.21.0",
"@types/negotiator": "^0.6.3",
"@types/node": "^22.9.0",
"@types/node": "^22.10.5",
"@types/react": "^18",
"@types/react-dom": "^18",
"@types/react-modal": "^3.16.3",
"axe-playwright": "^2.0.3",
"chromatic": "^11.12.5",
"concurrently": "^9.1.1",
"chromatic": "^11.22.0",
"concurrently": "^9.1.2",
"eslint": "^8",
"eslint-config-next": "15.1.3",
"eslint-plugin-storybook": "^0.11.0",
"eslint-plugin-storybook": "^0.11.2",
"msw": "^2.7.0",
"msw-storybook-addon": "^2.0.3",
"postcss": "^8",

View File

@ -1,6 +1,6 @@
import React from "react";
import type { Metadata } from "next";
import { Inter } from "next/font/google";
import { Inter, Poppins } from "next/font/google";
import { Providers } from "@/app/providers";
import { cn } from "@/lib/utils";
import { Navbar } from "@/components/agptui/Navbar";
@ -10,8 +10,16 @@ import TallyPopupSimple from "@/components/TallyPopup";
import { GoogleAnalytics } from "@next/third-parties/google";
import { Toaster } from "@/components/ui/toaster";
import { IconType } from "@/components/ui/icons";
import { GeistSans } from "geist/font/sans";
import { GeistMono } from "geist/font/mono";
const inter = Inter({ subsets: ["latin"] });
// Fonts
const inter = Inter({ subsets: ["latin"], variable: "--font-inter" });
const poppins = Poppins({
subsets: ["latin"],
weight: ["400", "500", "600", "700"],
variable: "--font-poppins",
});
export const metadata: Metadata = {
title: "NextGen AutoGPT",
@ -24,7 +32,10 @@ export default async function RootLayout({
children: React.ReactNode;
}>) {
return (
<html lang="en">
<html
lang="en"
className={`${GeistSans.variable} ${GeistMono.variable} ${poppins.variable} ${inter.variable}`}
>
<body className={cn("antialiased transition-colors", inter.className)}>
<Providers
attribute="class"

View File

@ -0,0 +1,11 @@
import { APIKeysSection } from "@/components/agptui/composite/APIKeySection";
const ApiKeysPage = () => {
return (
<div className="w-full pr-4 pt-24 md:pt-0">
<APIKeysSection />
</div>
);
};
export default ApiKeysPage;

View File

@ -8,6 +8,7 @@ export default function Layout({ children }: { children: React.ReactNode }) {
{ text: "Creator Dashboard", href: "/store/dashboard" },
{ text: "Agent dashboard", href: "/store/agent-dashboard" },
{ text: "Integrations", href: "/store/integrations" },
{ text: "API Keys", href: "/store/api_keys" },
{ text: "Profile", href: "/store/profile" },
{ text: "Settings", href: "/store/settings" },
],
@ -17,7 +18,7 @@ export default function Layout({ children }: { children: React.ReactNode }) {
return (
<div className="flex min-h-screen w-screen max-w-[1360px] flex-col lg:flex-row">
<Sidebar linkGroups={sidebarLinkGroups} />
<div className="pl-4">{children}</div>
<div className="flex-1 pl-4">{children}</div>
</div>
);
}

View File

@ -253,7 +253,13 @@ export function CustomNode({
!isHidden &&
(isRequired || isAdvancedOpen || isConnected || !isAdvanced) && (
<div key={propKey} data-id={`input-handle-${propKey}`}>
{isConnectable ? (
{isConnectable &&
!(
"oneOf" in propSchema &&
propSchema.oneOf &&
"discriminator" in propSchema &&
propSchema.discriminator
) ? (
<NodeHandle
keyName={propKey}
isConnected={isConnected}

View File

@ -61,7 +61,7 @@ const TallyPopupSimple = () => {
<Button
variant="default"
onClick={resetTutorial}
className="font-inter mb-0 h-14 w-28 rounded-2xl bg-[rgba(65,65,64,1)] text-left text-lg font-medium leading-6"
className="mb-0 h-14 w-28 rounded-2xl bg-[rgba(65,65,64,1)] text-left font-inter text-lg font-medium leading-6"
>
Tutorial
</Button>

View File

@ -2,7 +2,7 @@ import * as React from "react";
import Link from "next/link";
import { Button } from "./Button";
import { Sheet, SheetContent, SheetTrigger } from "@/components/ui/sheet";
import { Menu } from "lucide-react";
import { KeyIcon, Menu } from "lucide-react";
import {
IconDashboardLayout,
IconIntegrations,
@ -58,6 +58,15 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
Integrations
</div>
</Link>
<Link
href="/store/api_keys"
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
>
<KeyIcon className="h-6 w-6" />
<div className="p-ui-medium text-base font-medium leading-normal">
API Keys
</div>
</Link>
<Link
href="/store/profile"
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
@ -102,6 +111,15 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
Integrations
</div>
</Link>
<Link
href="/store/api_keys"
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
>
<KeyIcon className="h-6 w-6" strokeWidth={1} />
<div className="p-ui-medium text-base font-medium leading-normal">
API Keys
</div>
</Link>
<Link
href="/store/profile"
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"

View File

@ -0,0 +1,296 @@
"use client";
import { useState, useEffect } from "react";
import { APIKey, APIKeyPermission } from "@/lib/autogpt-server-api/types";
import { LuCopy } from "react-icons/lu";
import { Loader2, MoreVertical } from "lucide-react";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { useToast } from "@/components/ui/use-toast";
import {
Card,
CardContent,
CardDescription,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
DialogTrigger,
} from "@/components/ui/dialog";
import { Button } from "@/components/ui/button";
import { Label } from "@/components/ui/label";
import { Input } from "@/components/ui/input";
import { Checkbox } from "@/components/ui/checkbox";
import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "@/components/ui/table";
import { Badge } from "@/components/ui/badge";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
export function APIKeysSection() {
const [apiKeys, setApiKeys] = useState<APIKey[]>([]);
const [isLoading, setIsLoading] = useState(true);
const [isCreateOpen, setIsCreateOpen] = useState(false);
const [isKeyDialogOpen, setIsKeyDialogOpen] = useState(false);
const [newKeyName, setNewKeyName] = useState("");
const [newKeyDescription, setNewKeyDescription] = useState("");
const [newApiKey, setNewApiKey] = useState("");
const [selectedPermissions, setSelectedPermissions] = useState<
APIKeyPermission[]
>([]);
const { toast } = useToast();
const api = useBackendAPI();
useEffect(() => {
loadAPIKeys();
}, []);
const loadAPIKeys = async () => {
setIsLoading(true);
try {
const keys = await api.listAPIKeys();
setApiKeys(keys.filter((key) => key.status === "ACTIVE"));
} finally {
setIsLoading(false);
}
};
const handleCreateKey = async () => {
try {
const response = await api.createAPIKey(
newKeyName,
selectedPermissions,
newKeyDescription,
);
setNewApiKey(response.plain_text_key);
setIsCreateOpen(false);
setIsKeyDialogOpen(true);
loadAPIKeys();
} catch (error) {
toast({
title: "Error",
description: "Failed to create AutoGPT Platform API key",
variant: "destructive",
});
}
};
const handleCopyKey = () => {
navigator.clipboard.writeText(newApiKey);
toast({
title: "Copied",
description: "AutoGPT Platform API key copied to clipboard",
});
};
const handleRevokeKey = async (keyId: string) => {
try {
await api.revokeAPIKey(keyId);
toast({
title: "Success",
description: "AutoGPT Platform API key revoked successfully",
});
loadAPIKeys();
} catch (error) {
toast({
title: "Error",
description: "Failed to revoke AutoGPT Platform API key",
variant: "destructive",
});
}
};
return (
<Card>
<CardHeader>
<CardTitle>AutoGPT Platform API Keys</CardTitle>
<CardDescription>
Manage your AutoGPT Platform API keys for programmatic access
</CardDescription>
</CardHeader>
<CardContent>
<div className="mb-4 flex justify-end">
<Dialog open={isCreateOpen} onOpenChange={setIsCreateOpen}>
<DialogTrigger asChild>
<Button>Create Key</Button>
</DialogTrigger>
<DialogContent>
<DialogHeader>
<DialogTitle>Create New API Key</DialogTitle>
<DialogDescription>
Create a new AutoGPT Platform API key
</DialogDescription>
</DialogHeader>
<div className="grid gap-4 py-4">
<div className="grid gap-2">
<Label htmlFor="name">Name</Label>
<Input
id="name"
value={newKeyName}
onChange={(e) => setNewKeyName(e.target.value)}
placeholder="My AutoGPT Platform API Key"
/>
</div>
<div className="grid gap-2">
<Label htmlFor="description">Description (Optional)</Label>
<Input
id="description"
value={newKeyDescription}
onChange={(e) => setNewKeyDescription(e.target.value)}
placeholder="Used for..."
/>
</div>
<div className="grid gap-2">
<Label>Permissions</Label>
{Object.values(APIKeyPermission).map((permission) => (
<div
className="flex items-center space-x-2"
key={permission}
>
<Checkbox
id={permission}
checked={selectedPermissions.includes(permission)}
onCheckedChange={(checked) => {
setSelectedPermissions(
checked
? [...selectedPermissions, permission]
: selectedPermissions.filter(
(p) => p !== permission,
),
);
}}
/>
<Label htmlFor={permission}>{permission}</Label>
</div>
))}
</div>
</div>
<DialogFooter>
<Button
variant="outline"
onClick={() => setIsCreateOpen(false)}
>
Cancel
</Button>
<Button onClick={handleCreateKey}>Create</Button>
</DialogFooter>
</DialogContent>
</Dialog>
<Dialog open={isKeyDialogOpen} onOpenChange={setIsKeyDialogOpen}>
<DialogContent>
<DialogHeader>
<DialogTitle>AutoGPT Platform API Key Created</DialogTitle>
<DialogDescription>
Please copy your AutoGPT API key now. You won&apos;t be able
to see it again!
</DialogDescription>
</DialogHeader>
<div className="flex items-center space-x-2">
<code className="flex-1 rounded-md bg-secondary p-2 text-sm">
{newApiKey}
</code>
<Button size="icon" variant="outline" onClick={handleCopyKey}>
<LuCopy className="h-4 w-4" />
</Button>
</div>
<DialogFooter>
<Button onClick={() => setIsKeyDialogOpen(false)}>Close</Button>
</DialogFooter>
</DialogContent>
</Dialog>
</div>
{isLoading ? (
<div className="flex justify-center p-4">
<Loader2 className="h-6 w-6 animate-spin" />
</div>
) : (
apiKeys.length > 0 && (
<Table>
<TableHeader>
<TableRow>
<TableHead>Name</TableHead>
<TableHead>API Key</TableHead>
<TableHead>Status</TableHead>
<TableHead>Created</TableHead>
<TableHead>Last Used</TableHead>
<TableHead></TableHead>
</TableRow>
</TableHeader>
<TableBody>
{apiKeys.map((key) => (
<TableRow key={key.id}>
<TableCell>{key.name}</TableCell>
<TableCell>
<div className="rounded-md border p-1 px-2 text-xs">
{`${key.prefix}******************${key.postfix}`}
</div>
</TableCell>
<TableCell>
<Badge
variant={
key.status === "ACTIVE" ? "default" : "destructive"
}
className={
key.status === "ACTIVE"
? "border-green-600 bg-green-100 text-green-800"
: "border-red-600 bg-red-100 text-red-800"
}
>
{key.status}
</Badge>
</TableCell>
<TableCell>
{new Date(key.created_at).toLocaleDateString()}
</TableCell>
<TableCell>
{key.last_used_at
? new Date(key.last_used_at).toLocaleDateString()
: "Never"}
</TableCell>
<TableCell>
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button variant="ghost" size="sm">
<MoreVertical className="h-4 w-4" />
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
<DropdownMenuItem
className="text-destructive"
onClick={() => handleRevokeKey(key.id)}
>
Revoke
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
</TableCell>
</TableRow>
))}
</TableBody>
</Table>
)
)}
</CardContent>
</Card>
);
}

View File

@ -7,8 +7,15 @@ import SchemaTooltip from "@/components/SchemaTooltip";
import useCredentials from "@/hooks/useCredentials";
import { zodResolver } from "@hookform/resolvers/zod";
import { NotionLogoIcon } from "@radix-ui/react-icons";
import { FaDiscord, FaGithub, FaGoogle, FaMedium, FaKey } from "react-icons/fa";
import { FC, useState } from "react";
import {
FaDiscord,
FaGithub,
FaTwitter,
FaGoogle,
FaMedium,
FaKey,
} from "react-icons/fa";
import { FC, useMemo, useState } from "react";
import {
CredentialsMetaInput,
CredentialsProviderName,
@ -53,6 +60,7 @@ export const providerIcons: Record<
google: FaGoogle,
groq: fallbackIcon,
notion: NotionLogoIcon,
nvidia: fallbackIcon,
discord: FaDiscord,
d_id: fallbackIcon,
google_maps: FaGoogle,
@ -69,6 +77,7 @@ export const providerIcons: Record<
reddit: fallbackIcon,
fal: fallbackIcon,
revid: fallbackIcon,
twitter: FaTwitter,
unreal_speech: fallbackIcon,
exa: fallbackIcon,
hubspot: fallbackIcon,

View File

@ -32,6 +32,7 @@ const providerDisplayNames: Record<CredentialsProviderName, string> = {
jina: "Jina",
medium: "Medium",
notion: "Notion",
nvidia: "Nvidia",
ollama: "Ollama",
openai: "OpenAI",
openweathermap: "OpenWeatherMap",
@ -41,6 +42,7 @@ const providerDisplayNames: Record<CredentialsProviderName, string> = {
reddit: "Reddit",
replicate: "Replicate",
revid: "Rev.ID",
twitter: "Twitter",
unreal_speech: "Unreal Speech",
} as const;
// --8<-- [end:CredentialsProviderNames]

View File

@ -17,6 +17,7 @@ import {
BlockIOStringSubSchema,
BlockIONumberSubSchema,
BlockIOBooleanSubSchema,
BlockIOSimpleTypeSubSchema,
} from "@/lib/autogpt-server-api/types";
import React, { FC, useCallback, useEffect, useMemo, useState } from "react";
import { Button } from "./ui/button";
@ -40,6 +41,7 @@ import { LocalValuedInput } from "./ui/input";
import NodeHandle from "./NodeHandle";
import { ConnectionData } from "./CustomNode";
import { CredentialsInput } from "./integrations/credentials-input";
import { MultiSelect } from "./ui/multiselect-input";
type NodeObjectInputTreeProps = {
nodeId: string;
@ -101,6 +103,92 @@ const NodeObjectInputTree: FC<NodeObjectInputTreeProps> = ({
export default NodeObjectInputTree;
const NodeImageInput: FC<{
selfKey: string;
schema: BlockIOStringSubSchema;
value?: string;
error?: string;
handleInputChange: NodeObjectInputTreeProps["handleInputChange"];
className?: string;
displayName: string;
}> = ({
selfKey,
schema,
value = "",
error,
handleInputChange,
className,
displayName,
}) => {
const handleFileChange = useCallback(
async (event: React.ChangeEvent<HTMLInputElement>) => {
const file = event.target.files?.[0];
if (!file) return;
// Validate file type
if (!file.type.startsWith("image/")) {
console.error("Please upload an image file");
return;
}
// Convert to base64
const reader = new FileReader();
reader.onload = (e) => {
const base64String = (e.target?.result as string).split(",")[1];
handleInputChange(selfKey, base64String);
};
reader.readAsDataURL(file);
},
[selfKey, handleInputChange],
);
return (
<div className={cn("flex flex-col gap-2", className)}>
<div className="nodrag flex flex-col gap-2">
<div className="flex items-center gap-2">
<Button
variant="outline"
onClick={() =>
document.getElementById(`${selfKey}-upload`)?.click()
}
className="w-full"
>
{value ? "Change Image" : `Upload ${displayName}`}
</Button>
{value && (
<Button
variant="ghost"
className="text-red-500 hover:text-red-700"
onClick={() => handleInputChange(selfKey, "")}
>
<Cross2Icon className="h-4 w-4" />
</Button>
)}
</div>
<input
id={`${selfKey}-upload`}
type="file"
accept="image/*"
onChange={handleFileChange}
className="hidden"
/>
{value && (
<div className="relative mt-2 rounded-md border border-gray-300 p-2 dark:border-gray-600">
<img
src={`data:image/jpeg;base64,${value}`}
alt="Preview"
className="max-h-32 w-full rounded-md object-contain"
/>
</div>
)}
</div>
{error && <span className="error-message">{error}</span>}
</div>
);
};
const NodeDateTimeInput: FC<{
selfKey: string;
schema: BlockIOStringSubSchema;
@ -225,6 +313,8 @@ export const NodeGenericInputField: FC<{
);
}
console.log("propSchema", propSchema);
if ("properties" in propSchema) {
// Render a multi-select for all-boolean sub-schemas with more than 3 properties
if (
@ -290,12 +380,53 @@ export const NodeGenericInputField: FC<{
}
if ("anyOf" in propSchema) {
// Optional oneOf
if (
"oneOf" in propSchema.anyOf[0] &&
propSchema.anyOf[0].oneOf &&
"discriminator" in propSchema.anyOf[0] &&
propSchema.anyOf[0].discriminator
) {
return (
<NodeOneOfDiscriminatorField
nodeId={nodeId}
propKey={propKey}
propSchema={propSchema.anyOf[0]}
currentValue={currentValue}
errors={errors}
connections={connections}
handleInputChange={handleInputChange}
handleInputClick={handleInputClick}
className={className}
displayName={displayName}
/>
);
}
// optional items
const types = propSchema.anyOf.map((s) =>
"type" in s ? s.type : undefined,
);
if (types.includes("string") && types.includes("null")) {
// optional string
// optional string (including date-time format)
if (
"format" in propSchema.anyOf[0] &&
propSchema.anyOf[0].format === "date-time"
) {
return (
<NodeDateTimeInput
selfKey={propKey}
schema={propSchema.anyOf[0]}
value={currentValue}
error={errors[propKey]}
className={className}
displayName={displayName}
handleInputChange={handleInputChange}
/>
);
}
return (
<NodeStringInput
selfKey={propKey}
@ -356,6 +487,42 @@ export const NodeGenericInputField: FC<{
/>
);
} else if (types.includes("object") && types.includes("null")) {
// Render an optional multi-select when every property of the object sub-schema is a boolean
if (
Object.values(
(propSchema.anyOf[0] as BlockIOObjectSubSchema).properties,
).every(
(subSchema) => "type" in subSchema && subSchema.type == "boolean",
) &&
Object.keys((propSchema.anyOf[0] as BlockIOObjectSubSchema).properties)
.length >= 1
) {
const options = Object.keys(
(propSchema.anyOf[0] as BlockIOObjectSubSchema).properties,
);
const selectedKeys = Object.entries(currentValue || {})
.filter(([_, v]) => v)
.map(([k, _]) => k);
return (
<NodeMultiSelectInput
selfKey={propKey}
schema={propSchema.anyOf[0] as BlockIOObjectSubSchema}
selection={selectedKeys}
error={errors[propKey]}
className={className}
displayName={displayName}
handleInputChange={(key, selection) => {
handleInputChange(
key,
Object.fromEntries(
options.map((option) => [option, selection.includes(option)]),
),
);
}}
/>
);
}
return (
<NodeKeyValueInput
nodeId={nodeId}
@ -418,6 +585,19 @@ export const NodeGenericInputField: FC<{
switch (propSchema.type) {
case "string":
if ("image_upload" in propSchema && propSchema.image_upload === true) {
return (
<NodeImageInput
selfKey={propKey}
schema={propSchema}
value={currentValue}
error={errors[propKey]}
className={className}
displayName={displayName}
handleInputChange={handleInputChange}
/>
);
}
if ("format" in propSchema && propSchema.format === "date-time") {
return (
<NodeDateTimeInput
@ -523,7 +703,7 @@ const NodeOneOfDiscriminatorField: FC<{
propSchema: any;
currentValue?: any;
errors: { [key: string]: string | undefined };
connections: any;
connections: ConnectionData;
handleInputChange: (key: string, value: any) => void;
handleInputClick: (key: string) => void;
className?: string;
@ -538,7 +718,6 @@ const NodeOneOfDiscriminatorField: FC<{
handleInputChange,
handleInputClick,
className,
displayName,
}) => {
const discriminator = propSchema.discriminator;
@ -554,7 +733,7 @@ const NodeOneOfDiscriminatorField: FC<{
return {
value: variantDiscValue,
schema: variant,
schema: variant as BlockIOSubSchema,
};
})
.filter((v: any) => v.value != null);
@ -585,8 +764,24 @@ const NodeOneOfDiscriminatorField: FC<{
(opt: any) => opt.value === chosenType,
)?.schema;
function getEntryKey(key: string): string {
// use someKey (not childKey) as the handle key
return `${propKey}_#_${key}`;
}
function isConnected(key: string): boolean {
return connections.some(
(c) => c.targetHandle === getEntryKey(key) && c.target === nodeId,
);
}
return (
<div className={cn("flex flex-col space-y-2", className)}>
<div
className={cn(
"flex min-w-[400px] max-w-[95%] flex-col space-y-4",
className,
)}
>
<Select value={chosenType || ""} onValueChange={handleVariantChange}>
<SelectTrigger className="w-full">
<SelectValue placeholder="Select a type..." />
@ -607,32 +802,36 @@ const NodeOneOfDiscriminatorField: FC<{
if (someKey === "discriminator") {
return null;
}
const childKey = propKey ? `${propKey}.${someKey}` : someKey;
const childKey = propKey ? `${propKey}.${someKey}` : someKey; // full path key, used for history undo/redo
return (
<div
key={childKey}
className="flex w-full flex-row justify-between space-y-2"
className="mb-4 flex w-full flex-col justify-between space-y-2"
>
<span className="mr-2 mt-3 dark:text-gray-300">
{(childSchema as BlockIOSubSchema).title ||
beautifyString(someKey)}
</span>
<NodeGenericInputField
nodeId={nodeId}
key={propKey}
propKey={childKey}
propSchema={childSchema as BlockIOSubSchema}
currentValue={
currentValue ? currentValue[someKey] : undefined
}
errors={errors}
connections={connections}
handleInputChange={handleInputChange}
handleInputClick={handleInputClick}
displayName={
chosenVariantSchema.title || beautifyString(someKey)
}
<NodeHandle
keyName={getEntryKey(someKey)}
schema={childSchema as BlockIOSubSchema}
isConnected={isConnected(getEntryKey(someKey))}
isRequired={false}
side="left"
/>
{!isConnected(someKey) && (
<NodeGenericInputField
nodeId={nodeId}
key={propKey}
propKey={childKey}
propSchema={childSchema as BlockIOSubSchema}
currentValue={
currentValue ? currentValue[someKey] : undefined
}
errors={errors}
connections={connections}
handleInputChange={handleInputChange}
handleInputClick={handleInputClick}
displayName={beautifyString(someKey)}
/>
)}
</div>
);
},
@ -827,6 +1026,13 @@ const NodeKeyValueInput: FC<{
);
};
// Type guard: check whether a sub-schema is a string schema
function isStringSubSchema(
schema: BlockIOSimpleTypeSubSchema,
): schema is BlockIOStringSubSchema {
return "type" in schema && schema.type === "string";
}
const NodeArrayInput: FC<{
nodeId: string;
selfKey: string;

View File

@ -17,6 +17,9 @@ const isValidVideoUrl = (url: string): boolean => {
};
const isValidImageUrl = (url: string): boolean => {
if (url.startsWith("data:image/")) {
return true;
}
const imageExtensions = /\.(jpeg|jpg|gif|png|svg|webp)$/i;
const cleanedUrl = url.split("?")[0];
return imageExtensions.test(cleanedUrl);
@ -50,19 +53,21 @@ const VideoRenderer: React.FC<{ videoUrl: string }> = ({ videoUrl }) => {
);
};
const ImageRenderer: React.FC<{ imageUrl: string }> = ({ imageUrl }) => (
<div className="w-full p-2">
<picture>
<img
src={imageUrl}
alt="Image"
className="h-auto max-w-full"
width="100%"
height="auto"
/>
</picture>
</div>
);
const ImageRenderer: React.FC<{ imageUrl: string }> = ({ imageUrl }) => {
return (
<div className="w-full p-2">
<picture>
<img
src={imageUrl}
alt="Image"
className="h-auto max-w-full"
width="100%"
height="auto"
/>
</picture>
</div>
);
};
const AudioRenderer: React.FC<{ audioUrl: string }> = ({ audioUrl }) => (
<div className="w-full p-2">
@ -92,6 +97,9 @@ export const ContentRenderer: React.FC<{
truncateLongData?: boolean;
}> = ({ value, truncateLongData }) => {
if (typeof value === "string") {
if (value.startsWith("data:image/")) {
return <ImageRenderer imageUrl={value} />;
}
if (isValidVideoUrl(value)) {
return <VideoRenderer videoUrl={value} />;
} else if (isValidImageUrl(value)) {

View File

@ -31,6 +31,9 @@ import {
Schedule,
UserPasswordCredentials,
Credentials,
APIKeyPermission,
CreateAPIKeyResponse,
APIKey,
} from "./types";
import { createBrowserClient } from "@supabase/ssr";
import getServerSupabase from "../supabase/getServerSupabase";
@ -230,6 +233,36 @@ export default class BackendAPI {
);
}
// API Key related requests
async createAPIKey(
name: string,
permissions: APIKeyPermission[],
description?: string,
): Promise<CreateAPIKeyResponse> {
return this._request("POST", "/api-keys", {
name,
permissions,
description,
});
}
async listAPIKeys(): Promise<APIKey[]> {
return this._get("/api-keys");
}
async revokeAPIKey(keyId: string): Promise<APIKey> {
return this._request("DELETE", `/api-keys/${keyId}`);
}
async updateAPIKeyPermissions(
keyId: string,
permissions: APIKeyPermission[],
): Promise<APIKey> {
return this._request("PUT", `/api-keys/${keyId}/permissions`, {
permissions,
});
}
/**
* @returns `true` if a ping event was received, `false` if provider doesn't support pinging but the webhook exists.
* @throws `Error` if the webhook does not exist.

View File

@ -41,7 +41,7 @@ export type BlockIOSubSchema =
| BlockIOSimpleTypeSubSchema
| BlockIOCombinedTypeSubSchema;
type BlockIOSimpleTypeSubSchema =
export type BlockIOSimpleTypeSubSchema =
| BlockIOObjectSubSchema
| BlockIOCredentialsSubSchema
| BlockIOKVSubSchema
@ -121,12 +121,14 @@ export const PROVIDER_NAMES = {
JINA: "jina",
MEDIUM: "medium",
NOTION: "notion",
NVIDIA: "nvidia",
OLLAMA: "ollama",
OPENAI: "openai",
OPENWEATHERMAP: "openweathermap",
OPEN_ROUTER: "open_router",
PINECONE: "pinecone",
SLANT3D: "slant3d",
TWITTER: "twitter",
REPLICATE: "replicate",
REDDIT: "reddit",
REVID: "revid",
@ -525,3 +527,36 @@ export type StoreReviewCreate = {
score: number;
comments?: string;
};
// API Key Types
export enum APIKeyPermission {
EXECUTE_GRAPH = "EXECUTE_GRAPH",
READ_GRAPH = "READ_GRAPH",
EXECUTE_BLOCK = "EXECUTE_BLOCK",
READ_BLOCK = "READ_BLOCK",
}
export enum APIKeyStatus {
ACTIVE = "ACTIVE",
REVOKED = "REVOKED",
SUSPENDED = "SUSPENDED",
}
export interface APIKey {
id: string;
name: string;
prefix: string;
postfix: string;
status: APIKeyStatus;
permissions: APIKeyPermission[];
created_at: string;
last_used_at?: string;
revoked_at?: string;
description?: string;
}
export interface CreateAPIKeyResponse {
api_key: APIKey;
plain_text_key: string;
}

View File

@ -18,6 +18,8 @@ const config = {
mono: ["var(--font-geist-mono)"],
// Include the custom font family
neue: ['"PP Neue Montreal TT"', "sans-serif"],
poppin: ["var(--font-poppins)"],
inter: ["var(--font-inter)"],
},
colors: {
border: "hsl(var(--border))",

View File

@ -1237,10 +1237,10 @@
dependencies:
"@hapi/hoek" "^9.0.0"
"@hookform/resolvers@^3.9.1":
version "3.9.1"
resolved "https://registry.yarnpkg.com/@hookform/resolvers/-/resolvers-3.9.1.tgz#a23883c40bfd449cb6c6ab5a0fa0729184c950ff"
integrity sha512-ud2HqmGBM0P0IABqoskKWI6PEf6ZDDBZkFqe2Vnl+mTHCEHzr3ISjjZyCwTjC/qpL25JC9aIDkloQejvMeq0ug==
"@hookform/resolvers@^3.10.0":
version "3.10.0"
resolved "https://registry.yarnpkg.com/@hookform/resolvers/-/resolvers-3.10.0.tgz#7bfd18113daca4e57e27e1205b7d5a2d371aa59a"
integrity sha512-79Dv+3mDF7i+2ajj7SkypSKHhl1cbln1OGavqrsF7p6mbUv11xpqpacPsGDCTRvCSjEEIez2ef1NveSVL3b0Ag==
"@humanwhocodes/config-array@^0.13.0":
version "0.13.0"
@ -1750,10 +1750,10 @@
resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.20.tgz#689bc7beb8005b73c95d926e7edfb7f73efc78f2"
integrity sha512-AFmqeLW6LtxeFTuoB+MXFeM5fm5052i3MU6xD0WzJDOwku6SkZaxb1bxjBaRC8uNqTRTSPl0yMFtjNowIVI67w==
"@next/third-parties@^15.0.4":
version "15.1.0"
resolved "https://registry.yarnpkg.com/@next/third-parties/-/third-parties-15.1.0.tgz#ab898927a006fe41ef90888220b51e22e11e110c"
integrity sha512-eiv8vTo5HJOE/LabnIjRNVpN0hvjXfqPrE7D/XecmWvHBs9KrIISxlb1NZizDMcvjGtnHkdupWsquM9ur25rYw==
"@next/third-parties@^15.1.3":
version "15.1.3"
resolved "https://registry.yarnpkg.com/@next/third-parties/-/third-parties-15.1.3.tgz#89c5c85b68d98a8787f0fa43a8d55ade7d6d5cf2"
integrity sha512-nz2mthh08xMRgNKRA+Z7lM1BqHqukGcFyu5z0nXFo3/KXsBgaPJkfnkfebw/YTqkxryV+aEttf/iAWDB6dUO6A==
dependencies:
third-party-capital "1.0.20"
@ -1801,10 +1801,10 @@
resolved "https://registry.yarnpkg.com/@open-draft/until/-/until-2.1.0.tgz#0acf32f470af2ceaf47f095cdecd40d68666efda"
integrity sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==
"@opentelemetry/api-logs@0.52.1":
version "0.52.1"
resolved "https://registry.yarnpkg.com/@opentelemetry/api-logs/-/api-logs-0.52.1.tgz#52906375da4d64c206b0c4cb8ffa209214654ecc"
integrity sha512-qnSqB2DQ9TPP96dl8cDubDvrUyWc0/sK81xHTK8eSUspzDM3bsewX903qclQFvVhgStjRWdC5bLb3kQqMkfV5A==
"@opentelemetry/api-logs@0.53.0":
version "0.53.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/api-logs/-/api-logs-0.53.0.tgz#c478cbd8120ec2547b64edfa03a552cfe42170be"
integrity sha512-8HArjKx+RaAI8uEIgcORbZIPklyh1YLjPSBus8hjRmvLi6DeFzgOcdZ7KwPabKj8mXF8dX0hyfAyGfycz0DbFw==
dependencies:
"@opentelemetry/api" "^1.0.0"
@ -2052,13 +2052,13 @@
semver "^7.5.2"
shimmer "^1.2.1"
"@opentelemetry/instrumentation@^0.49 || ^0.50 || ^0.51 || ^0.52.0":
version "0.52.1"
resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation/-/instrumentation-0.52.1.tgz#2e7e46a38bd7afbf03cf688c862b0b43418b7f48"
integrity sha512-uXJbYU/5/MBHjMp1FqrILLRuiJCs3Ofk0MeRDk8g1S1gD47U8X3JnSwcMO1rtRo1x1a7zKaQHaoYu49p/4eSKw==
"@opentelemetry/instrumentation@^0.49 || ^0.50 || ^0.51 || ^0.52.0 || ^0.53.0":
version "0.53.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation/-/instrumentation-0.53.0.tgz#e6369e4015eb5112468a4d45d38dcada7dad892d"
integrity sha512-DMwg0hy4wzf7K73JJtl95m/e0boSoWhH07rfvHvYzQtBD3Bmv0Wc1x733vyZBqmFm8OjJD0/pfiUg1W3JjFX0A==
dependencies:
"@opentelemetry/api-logs" "0.52.1"
"@types/shimmer" "^1.0.2"
"@opentelemetry/api-logs" "0.53.0"
"@types/shimmer" "^1.2.0"
import-in-the-middle "^1.8.1"
require-in-the-middle "^7.1.1"
semver "^7.5.2"
@ -2128,13 +2128,13 @@
schema-utils "^4.2.0"
source-map "^0.7.3"
"@prisma/instrumentation@5.19.1":
version "5.19.1"
resolved "https://registry.yarnpkg.com/@prisma/instrumentation/-/instrumentation-5.19.1.tgz#146319cf85f22b7a43296f0f40cfeac55516e66e"
integrity sha512-VLnzMQq7CWroL5AeaW0Py2huiNKeoMfCH3SUxstdzPrlWQi6UQ9UrfcbUkNHlVFqOMacqy8X/8YtE0kuKDpD9w==
"@prisma/instrumentation@5.22.0":
version "5.22.0"
resolved "https://registry.yarnpkg.com/@prisma/instrumentation/-/instrumentation-5.22.0.tgz#c39941046e9886e17bdb47dbac45946c24d579aa"
integrity sha512-LxccF392NN37ISGxIurUljZSh1YWnphO34V5a0+T7FVQG2u9bhAXRTJpgmQ3483woVhkraQZFF7cbRrpbw/F4Q==
dependencies:
"@opentelemetry/api" "^1.8"
"@opentelemetry/instrumentation" "^0.49 || ^0.50 || ^0.51 || ^0.52.0"
"@opentelemetry/instrumentation" "^0.49 || ^0.50 || ^0.51 || ^0.52.0 || ^0.53.0"
"@opentelemetry/sdk-trace-base" "^1.22"
"@radix-ui/number@1.1.0":
@ -2630,51 +2630,51 @@
resolved "https://registry.yarnpkg.com/@scarf/scarf/-/scarf-1.4.0.tgz#3bbb984085dbd6d982494538b523be1ce6562972"
integrity sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==
"@sentry-internal/browser-utils@8.45.1":
version "8.45.1"
resolved "https://registry.yarnpkg.com/@sentry-internal/browser-utils/-/browser-utils-8.45.1.tgz#1ca97f1dfad8a7f5543074b4abd11dc6bc6a1c7b"
integrity sha512-sZwtP3zAzDsjUS7WkMW5VGbvSl7hGKTMc8gAJbpEsrybMxllIP13zzMRwpeFF11RnnvbrZ/FtAeX58Mvj0jahA==
"@sentry-internal/browser-utils@8.48.0":
version "8.48.0"
resolved "https://registry.yarnpkg.com/@sentry-internal/browser-utils/-/browser-utils-8.48.0.tgz#320713e29566929894de42d54152064ec19cc9b3"
integrity sha512-pLtu0Fa1Ou0v3M1OEO1MB1EONJVmXEGtoTwFRCO1RPQI2ulmkG6BikINClFG5IBpoYKZ33WkEXuM6U5xh+pdZg==
dependencies:
"@sentry/core" "8.45.1"
"@sentry/core" "8.48.0"
"@sentry-internal/feedback@8.45.1":
version "8.45.1"
resolved "https://registry.yarnpkg.com/@sentry-internal/feedback/-/feedback-8.45.1.tgz#8e8f3bd25408bce6c65d6a9176df065d1ba5b568"
integrity sha512-zCKptzki4SLnG+s8je8dgnppOKFjiiO4GVBc4fh7uL8zjNPBnxW8wK4SrPfAEKVYaHUzkKc5vixwUqcpmfLLGw==
"@sentry-internal/feedback@8.48.0":
version "8.48.0"
resolved "https://registry.yarnpkg.com/@sentry-internal/feedback/-/feedback-8.48.0.tgz#92d2301b0e7379716efae6c05bc4a4740687921a"
integrity sha512-6PwcJNHVPg0EfZxmN+XxVOClfQpv7MBAweV8t9i5l7VFr8sM/7wPNSeU/cG7iK19Ug9ZEkBpzMOe3G4GXJ5bpw==
dependencies:
"@sentry/core" "8.45.1"
"@sentry/core" "8.48.0"
"@sentry-internal/replay-canvas@8.45.1":
version "8.45.1"
resolved "https://registry.yarnpkg.com/@sentry-internal/replay-canvas/-/replay-canvas-8.45.1.tgz#748272da93a23a5323f9ef381ef3c7189a177447"
integrity sha512-qiPg6XwOwkiMMe/8Qf3EhXCqkSlSnWLlorYngIbdkV2klbWjd7vKnqkFJF4PnaS0g7kkZr7nh+MdzpyLyuj2Mw==
"@sentry-internal/replay-canvas@8.48.0":
version "8.48.0"
resolved "https://registry.yarnpkg.com/@sentry-internal/replay-canvas/-/replay-canvas-8.48.0.tgz#f88282b0594751407ca3016d0a63b133c2e37ac3"
integrity sha512-LdivLfBXXB9us1aAc6XaL7/L2Ob4vi3C/fEOXElehg3qHjX6q6pewiv5wBvVXGX1NfZTRvu+X11k6TZoxKsezw==
dependencies:
"@sentry-internal/replay" "8.45.1"
"@sentry/core" "8.45.1"
"@sentry-internal/replay" "8.48.0"
"@sentry/core" "8.48.0"
"@sentry-internal/replay@8.45.1":
version "8.45.1"
resolved "https://registry.yarnpkg.com/@sentry-internal/replay/-/replay-8.45.1.tgz#f05518adbe17566b1df6d2a2016d81f5f6c1c9a6"
integrity sha512-cOA9CodNSR9+hmICDaGIDUvWiwxQxeMHk/esbjB8uAW8HG4CYTG3CTYTZmlmou7DuysfMd4JNuFmDFBj+YU5/A==
"@sentry-internal/replay@8.48.0":
version "8.48.0"
resolved "https://registry.yarnpkg.com/@sentry-internal/replay/-/replay-8.48.0.tgz#2cc802178f6b0185581b61058f2541b9f3384a8b"
integrity sha512-csILVupc5RkrsTrncuUTGmlB56FQSFjXPYWG8I8yBTGlXEJ+o8oTuF6+55R4vbw3EIzBveXWi4kEBbnQlXW/eg==
dependencies:
"@sentry-internal/browser-utils" "8.45.1"
"@sentry/core" "8.45.1"
"@sentry-internal/browser-utils" "8.48.0"
"@sentry/core" "8.48.0"
"@sentry/babel-plugin-component-annotate@2.22.7":
version "2.22.7"
resolved "https://registry.yarnpkg.com/@sentry/babel-plugin-component-annotate/-/babel-plugin-component-annotate-2.22.7.tgz#604c7e33d48528a13477e7af597c4d5fca51b8bd"
integrity sha512-aa7XKgZMVl6l04NY+3X7BP7yvQ/s8scn8KzQfTLrGRarziTlMGrsCOBQtCNWXOPEbtxAIHpZ9dsrAn5EJSivOQ==
"@sentry/browser@8.45.1":
version "8.45.1"
resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-8.45.1.tgz#a7b9af5deefb57778fbb526cb8e43fa2f6ae3d97"
integrity sha512-/KvYhQSRg8m9kotG8h9FrfXCWRlebrvdfXKjj1oE9SyZ2LmR8Ze9AcEw1qzsBsa1F1D/a5FQbUJahSoLBkaQPA==
"@sentry/browser@8.48.0":
version "8.48.0"
resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-8.48.0.tgz#bdd7793ddd3ae7a65d595066bde93fbb63ce8b9d"
integrity sha512-fuuVULB5/1vI8NoIwXwR3xwhJJqk+y4RdSdajExGF7nnUDBpwUJyXsmYJnOkBO+oLeEs58xaCpotCKiPUNnE3g==
dependencies:
"@sentry-internal/browser-utils" "8.45.1"
"@sentry-internal/feedback" "8.45.1"
"@sentry-internal/replay" "8.45.1"
"@sentry-internal/replay-canvas" "8.45.1"
"@sentry/core" "8.45.1"
"@sentry-internal/browser-utils" "8.48.0"
"@sentry-internal/feedback" "8.48.0"
"@sentry-internal/replay" "8.48.0"
"@sentry-internal/replay-canvas" "8.48.0"
"@sentry/core" "8.48.0"
"@sentry/bundler-plugin-core@2.22.7":
version "2.22.7"
@ -2744,35 +2744,35 @@
"@sentry/cli-win32-i686" "2.39.1"
"@sentry/cli-win32-x64" "2.39.1"
"@sentry/core@8.45.1":
version "8.45.1"
resolved "https://registry.yarnpkg.com/@sentry/core/-/core-8.45.1.tgz#da3b13a8fd2276e8a1d4f5a38f9b8a0ed6647b49"
integrity sha512-1fGmkr0paZshh38mD29c4CfkRkgFoYDaAGyDLoGYfTbEph/lU8RHB2HWzN93McqNdMEhl1DRRyqIasUZoPlqSA==
"@sentry/core@8.48.0":
version "8.48.0"
resolved "https://registry.yarnpkg.com/@sentry/core/-/core-8.48.0.tgz#3bb8d06305f0ec7c873453844687deafdeab168b"
integrity sha512-VGwYgTfLpvJ5LRO5A+qWo1gpo6SfqaGXL9TOzVgBucAdpzbrYHpZ87sEarDVq/4275uk1b0S293/mfsskFczyw==
"@sentry/nextjs@^8":
version "8.45.1"
resolved "https://registry.yarnpkg.com/@sentry/nextjs/-/nextjs-8.45.1.tgz#4bad8fe78709c8efe63f34d5b4ed86bfa086efd6"
integrity sha512-EcUuQHGAk8cheuPfjRCXacjoD5ClDsB9qssYBVIvJozNEW5C+A0eJwPj/Qd1C05tdcJ6MMdMRv0NrTHdCN1v1A==
version "8.48.0"
resolved "https://registry.yarnpkg.com/@sentry/nextjs/-/nextjs-8.48.0.tgz#4882fe8a92833e333c13d276d62648295769106a"
integrity sha512-eKbhUW+9KCyK2xIO09iUI3KszfCxtmKgamSYED+N5bb1DzySjDur6BabHFBgA7BcQmYKpTSj/lVxznFNw3H1uQ==
dependencies:
"@opentelemetry/api" "^1.9.0"
"@opentelemetry/semantic-conventions" "^1.28.0"
"@rollup/plugin-commonjs" "28.0.1"
"@sentry-internal/browser-utils" "8.45.1"
"@sentry/core" "8.45.1"
"@sentry/node" "8.45.1"
"@sentry/opentelemetry" "8.45.1"
"@sentry/react" "8.45.1"
"@sentry/vercel-edge" "8.45.1"
"@sentry-internal/browser-utils" "8.48.0"
"@sentry/core" "8.48.0"
"@sentry/node" "8.48.0"
"@sentry/opentelemetry" "8.48.0"
"@sentry/react" "8.48.0"
"@sentry/vercel-edge" "8.48.0"
"@sentry/webpack-plugin" "2.22.7"
chalk "3.0.0"
resolve "1.22.8"
rollup "3.29.5"
stacktrace-parser "^0.1.10"
"@sentry/node@8.45.1":
version "8.45.1"
resolved "https://registry.yarnpkg.com/@sentry/node/-/node-8.45.1.tgz#24ebe7cb6a1ddc3d95602945a9574978797ac6f9"
integrity sha512-xvlXifM/FSOQdLAqQBuo04SiOh7RP8rRRr+c5G/YbBtgJA867Pve0X8JZK2BJpDZ3OrGvzXV1Ubnt9ao4rBfYA==
"@sentry/node@8.48.0":
version "8.48.0"
resolved "https://registry.yarnpkg.com/@sentry/node/-/node-8.48.0.tgz#d4d1374431028af7663a06bf7268bf40a9bf1fa0"
integrity sha512-pnprAuUOc8cxnJdZA09hutHXNsbQZoDgzf3zPyXMNx0ewB/RviFMOgfe7ViX1mIB/oVrcFenXBgO5uvTd7JwPg==
dependencies:
"@opentelemetry/api" "^1.9.0"
"@opentelemetry/context-async-hooks" "^1.29.0"
@ -2805,34 +2805,34 @@
"@opentelemetry/resources" "^1.29.0"
"@opentelemetry/sdk-trace-base" "^1.29.0"
"@opentelemetry/semantic-conventions" "^1.28.0"
"@prisma/instrumentation" "5.19.1"
"@sentry/core" "8.45.1"
"@sentry/opentelemetry" "8.45.1"
"@prisma/instrumentation" "5.22.0"
"@sentry/core" "8.48.0"
"@sentry/opentelemetry" "8.48.0"
import-in-the-middle "^1.11.2"
"@sentry/opentelemetry@8.45.1":
version "8.45.1"
resolved "https://registry.yarnpkg.com/@sentry/opentelemetry/-/opentelemetry-8.45.1.tgz#4c8e686818fb6af45ed486c341902affd9f110cf"
integrity sha512-khnR5TS21ksITTXmXnpniRN7brlZS5RNNQuMZ9n3MYi/L1/s9LT73skNh1gder28OV6ZxGUgrTZ+1dtKqn9tig==
"@sentry/opentelemetry@8.48.0":
version "8.48.0"
resolved "https://registry.yarnpkg.com/@sentry/opentelemetry/-/opentelemetry-8.48.0.tgz#718e7942724d64ffe8e901941b0e4050fa07780b"
integrity sha512-1JLXgmIvD3T7xn9ypwWW0V3GirNy4BN2fOUbZau/nUX/Jj5DttSoPn7x7xTaPSpfaA24PiP93zXmJEfZvCk00Q==
dependencies:
"@sentry/core" "8.45.1"
"@sentry/core" "8.48.0"
"@sentry/react@8.45.1":
version "8.45.1"
resolved "https://registry.yarnpkg.com/@sentry/react/-/react-8.45.1.tgz#75afd1508cfdda9dbe7109ffbbb58767464bf56a"
integrity sha512-ooMR2Lt4YSc5CMJklBKiL/mb+uidcZMpflxUvVUbtWMif+PqTUkfPRyICv6vs7muxK9i84Rr4iCkyZ4ns4H27A==
"@sentry/react@8.48.0":
version "8.48.0"
resolved "https://registry.yarnpkg.com/@sentry/react/-/react-8.48.0.tgz#0550a9a4d123d20c680d94bdaa8a8dbeb3b2661e"
integrity sha512-J8XAUOJYbsjXnowTEXE+zWJWLWUzQGP8kMb+smoGdRzFJwwXKrbE709Kr/Boz6rK48EbbRT4UUINoTbHgL3RHQ==
dependencies:
"@sentry/browser" "8.45.1"
"@sentry/core" "8.45.1"
"@sentry/browser" "8.48.0"
"@sentry/core" "8.48.0"
hoist-non-react-statics "^3.3.2"
"@sentry/vercel-edge@8.45.1":
version "8.45.1"
resolved "https://registry.yarnpkg.com/@sentry/vercel-edge/-/vercel-edge-8.45.1.tgz#d32b6302d5f062c2a641cbcf2be8614e99a9b30c"
integrity sha512-taCKf2ESNzpYRwuaikS17YA2upqzzPbemT+If+kOafgSUrLEq2YRYRvgoruyFLmjOGO9+634+HmGo7Nyn5CGjQ==
"@sentry/vercel-edge@8.48.0":
version "8.48.0"
resolved "https://registry.yarnpkg.com/@sentry/vercel-edge/-/vercel-edge-8.48.0.tgz#e2d429dc3f24b47f4bdae1e34856d3b258365b03"
integrity sha512-5bxMCTkadnvJvCC363ZXEdAHaWS/RAAvsI+8RAFObJO0tUemjKrgbHM/1YcvLRZSuBs6BSn9RjDipzzlFgtBWw==
dependencies:
"@opentelemetry/api" "^1.9.0"
"@sentry/core" "8.45.1"
"@sentry/core" "8.48.0"
"@sentry/webpack-plugin@2.22.7":
version "2.22.7"
@ -3714,10 +3714,10 @@
resolved "https://registry.yarnpkg.com/@types/negotiator/-/negotiator-0.6.3.tgz#29e8fce64e35f57f6fe9c624f8e4ed304357745a"
integrity sha512-JkXTOdKs5MF086b/pt8C3+yVp3iDUwG635L7oCH6HvJvvr6lSUU5oe/gLXnPEfYRROHjJIPgCV6cuAg8gGkntQ==
"@types/node@*", "@types/node@^22.0.0", "@types/node@^22.9.0":
version "22.10.2"
resolved "https://registry.yarnpkg.com/@types/node/-/node-22.10.2.tgz#a485426e6d1fdafc7b0d4c7b24e2c78182ddabb9"
integrity sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==
"@types/node@*", "@types/node@^22.0.0", "@types/node@^22.10.5":
version "22.10.5"
resolved "https://registry.yarnpkg.com/@types/node/-/node-22.10.5.tgz#95af89a3fb74a2bb41ef9927f206e6472026e48b"
integrity sha512-F8Q+SeGimwOo86fiovQh8qiXfFEh2/ocYv7tU5pJ3EXMSSxk1Joj5wefpFK2fHTf/N6HKGSxIDBT9f3gCxXPkQ==
dependencies:
undici-types "~6.20.0"
@ -3798,7 +3798,7 @@
resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.8.tgz#8268a8c57a3e4abd25c165ecd36237db7948a55e"
integrity sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==
"@types/shimmer@^1.0.2", "@types/shimmer@^1.2.0":
"@types/shimmer@^1.2.0":
version "1.2.0"
resolved "https://registry.yarnpkg.com/@types/shimmer/-/shimmer-1.2.0.tgz#9b706af96fa06416828842397a70dfbbf1c14ded"
integrity sha512-UE7oxhQLLd9gub6JKIAhDq06T0F6FnztwMNRvYgjeQSBeMc1ZG/tA47EwfduvkuQS8apbkM/lpLpWsaCeYsXVg==
@ -4990,10 +4990,10 @@ chokidar@^3.5.3, chokidar@^3.6.0:
optionalDependencies:
fsevents "~2.3.2"
chromatic@^11.12.5, chromatic@^11.15.0:
version "11.20.2"
resolved "https://registry.yarnpkg.com/chromatic/-/chromatic-11.20.2.tgz#10b309179cdc0b9195a5b68970366f9ebe67dfd1"
integrity sha512-c+M3HVl5Y60c7ipGTZTyeWzWubRW70YsJ7PPDpO1D735ib8+Lu3yGF90j61pvgkXGngpkTPHZyBw83lcu2JMxA==
chromatic@^11.15.0, chromatic@^11.22.0:
version "11.22.0"
resolved "https://registry.yarnpkg.com/chromatic/-/chromatic-11.22.0.tgz#9c2b05a0c5a94c5c0cc2b6be6969c112ac4543d0"
integrity sha512-u1kAPR9lj9aFzsCp0iWPXBbsKgcxFU7iJO6mFbgNHGVg+YPBqiJMuvgB8EQHdNbHjk5amFnGnIz/Ww8fK3t9Hw==
chrome-trace-event@^1.0.2:
version "1.0.4"
@ -5192,10 +5192,10 @@ concat-map@0.0.1:
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==
concurrently@^9.1.1:
version "9.1.1"
resolved "https://registry.yarnpkg.com/concurrently/-/concurrently-9.1.1.tgz#609dde2ce12f4f12d6a5ea6eace4c38bb7ab2ead"
integrity sha512-6VX8lrBIycgZKTwBsWS+bLrmkGRkDmvtGsYylRN9b93CygN6CbK46HmnQ3rdSOR8HRjdahDrxb5MqD9cEFOg5Q==
concurrently@^9.1.2:
version "9.1.2"
resolved "https://registry.yarnpkg.com/concurrently/-/concurrently-9.1.2.tgz#22d9109296961eaee773e12bfb1ce9a66bc9836c"
integrity sha512-H9MWcoPsYddwbOGM6difjVwVZHl63nwMEwDJG/L7VGtuaJhb12h2caPG2tVPWs7emuYix252iGfqOyrz1GczTQ==
dependencies:
chalk "^4.1.2"
lodash "^4.17.21"
@ -5540,6 +5540,11 @@ data-view-byte-offset@^1.0.0:
es-errors "^1.3.0"
is-data-view "^1.0.1"
date-fns-jalali@^4.1.0-0:
version "4.1.0-0"
resolved "https://registry.yarnpkg.com/date-fns-jalali/-/date-fns-jalali-4.1.0-0.tgz#9c7fb286004fab267a300d3e9f1ada9f10b4b6b0"
integrity sha512-hTIP/z+t+qKwBDcmmsnmjWTduxCg+5KfdqWQvb2X/8C9+knYY6epN/pfxdDuyVlSVeFz0sM5eEfwIUQ70U4ckg==
date-fns@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-4.1.0.tgz#64b3d83fff5aa80438f5b1a633c2e83b8a1c2d14"
@ -6241,10 +6246,10 @@ eslint-plugin-react@^7.37.0:
string.prototype.matchall "^4.0.11"
string.prototype.repeat "^1.0.0"
eslint-plugin-storybook@^0.11.0:
version "0.11.1"
resolved "https://registry.yarnpkg.com/eslint-plugin-storybook/-/eslint-plugin-storybook-0.11.1.tgz#4ef4f3550855fdc4a902296dfc278340ec287506"
integrity sha512-yGKpAYkBm/Q2hZg476vRUAvd9lAccjjSvzU5nYy3BSQbKTPy7uopx7JEpwk2vSuw4weTMZzWF64z9/gp/K5RCg==
eslint-plugin-storybook@^0.11.2:
version "0.11.2"
resolved "https://registry.yarnpkg.com/eslint-plugin-storybook/-/eslint-plugin-storybook-0.11.2.tgz#a46f8fa2b87d15f66251e832a10d5481fc73a028"
integrity sha512-0Z4DUklJrC+GHjCRXa7PYfPzWC15DaVnwaOYenpgXiCEijXPZkLKCms+rHhtoRcWccP7Z8DpOOaP1gc3P9oOwg==
dependencies:
"@storybook/csf" "^0.1.11"
"@typescript-eslint/utils" "^8.8.1"
@ -6695,13 +6700,13 @@ forwarded-parse@2.1.2:
resolved "https://registry.yarnpkg.com/forwarded-parse/-/forwarded-parse-2.1.2.tgz#08511eddaaa2ddfd56ba11138eee7df117a09325"
integrity sha512-alTFZZQDKMporBH77856pXgzhEzaUVmLCDk+egLgIgHst3Tpndzz8MnKe+GzRJRfvVdn69HhpW7cmXzvtLvJAw==
framer-motion@^11.15.0:
version "11.15.0"
resolved "https://registry.yarnpkg.com/framer-motion/-/framer-motion-11.15.0.tgz#93e5d1839d500ba9cab1d617959a36142a61212b"
integrity sha512-MLk8IvZntxOMg7lDBLw2qgTHHv664bYoYmnFTmE0Gm/FW67aOJk0WM3ctMcG+Xhcv+vh5uyyXwxvxhSeJzSe+w==
framer-motion@^11.16.0:
version "11.16.0"
resolved "https://registry.yarnpkg.com/framer-motion/-/framer-motion-11.16.0.tgz#6592689bc8d6422207a55e48297eae99e9aa2eb2"
integrity sha512-oL2AWqLQuw0+CNEUa0sz3mWC/n3i147CckvpQn8bLRs30b+HxTxlRi0YR2FpHHhAbWV7DKjNdHU42KHLfBWh/g==
dependencies:
motion-dom "^11.14.3"
motion-utils "^11.14.3"
motion-dom "^11.16.0"
motion-utils "^11.16.0"
tslib "^2.4.0"
fromentries@^1.2.0:
@ -8398,10 +8403,10 @@ lru-cache@^5.1.1:
dependencies:
yallist "^3.0.2"
lucide-react@^0.468.0:
version "0.468.0"
resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.468.0.tgz#830c1bfd905575ddd23b832baa420c87db166910"
integrity sha512-6koYRhnM2N0GGZIdXzSeiNwguv1gt/FAjZOiPl76roBi3xKEXa4WmfpxgQwTTL4KipXjefrnf3oV4IsYhi4JFA==
lucide-react@^0.469.0:
version "0.469.0"
resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.469.0.tgz#f16936ca6521482fef754a7eabb310e6c68e1482"
integrity sha512-28vvUnnKQ/dBwiCQtwJw7QauYnE7yd2Cyp4tTTJpvglX4EMpbflcdBgrgToX2j71B3YvugK/NH3BGUk+E/p/Fw==
lz-string@^1.5.0:
version "1.5.0"
@ -8884,15 +8889,17 @@ moment@^2.30.1:
resolved "https://registry.yarnpkg.com/moment/-/moment-2.30.1.tgz#f8c91c07b7a786e30c59926df530b4eac96974ae"
integrity sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==
motion-dom@^11.14.3:
version "11.14.3"
resolved "https://registry.yarnpkg.com/motion-dom/-/motion-dom-11.14.3.tgz#725c72c0f1d0b632e42fdd8d13b69ecf9fe202c0"
integrity sha512-lW+D2wBy5vxLJi6aCP0xyxTxlTfiu+b+zcpVbGVFUxotwThqhdpPRSmX8xztAgtZMPMeU0WGVn/k1w4I+TbPqA==
motion-dom@^11.16.0:
version "11.16.0"
resolved "https://registry.yarnpkg.com/motion-dom/-/motion-dom-11.16.0.tgz#1766e4f9ada72c30ba8f3e698774372fcbcc95b3"
integrity sha512-4bmEwajSdrljzDAYpu6ceEdtI4J5PH25fmN8YSx7Qxk6OMrC10CXM0D5y+VO/pFZjhmCvm2bGf7Rus482kwhzA==
dependencies:
motion-utils "^11.16.0"
motion-utils@^11.14.3:
version "11.14.3"
resolved "https://registry.yarnpkg.com/motion-utils/-/motion-utils-11.14.3.tgz#cd4a413463739498411f82abb67b3dd58768b0f8"
integrity sha512-Xg+8xnqIJTpr0L/cidfTTBFkvRw26ZtGGuIhA94J9PQ2p4mEa06Xx7QVYZH0BP+EpMSaDlu+q0I0mmvwADPsaQ==
motion-utils@^11.16.0:
version "11.16.0"
resolved "https://registry.yarnpkg.com/motion-utils/-/motion-utils-11.16.0.tgz#e75865442278be49e411ca9105c9139edc572811"
integrity sha512-ngdWPjg31rD4WGXFi0eZ00DQQqKKu04QExyv/ymlC+3k+WIgYVFbt6gS5JsFPbJODTF/r8XiE/X+SsoT9c0ocw==
ms@^2.1.1, ms@^2.1.3:
version "2.1.3"
@ -9895,13 +9902,14 @@ react-confetti@^6.1.0:
dependencies:
tween-functions "^1.2.0"
react-day-picker@^9.4.4:
version "9.4.4"
resolved "https://registry.yarnpkg.com/react-day-picker/-/react-day-picker-9.4.4.tgz#1c514c86489ede20046eec957c68a7b31542bd49"
integrity sha512-1s+jA/bFYtoxhhr8M0kkFHLiMTSII6qU8UfDFprRAUStTVHljLTjg4oarvAngPlQ1cQAC+LUb0k/qMc+jjhmxw==
react-day-picker@^9.5.0:
version "9.5.0"
resolved "https://registry.yarnpkg.com/react-day-picker/-/react-day-picker-9.5.0.tgz#2ae36e85d6506026d72e350f49b5607d011cfd6f"
integrity sha512-WmJnPFVLnKh5Qscm7wavMNg86rqPverSWjx+zgK8/ZmGRSQ8c8OoqW10RI+AzAfT2atIxImpCUU2R9Z7Xb2SUA==
dependencies:
"@date-fns/tz" "^1.2.0"
date-fns "^4.1.0"
date-fns-jalali "^4.1.0-0"
react-docgen-typescript@^2.2.2:
version "2.2.2"
@ -9932,10 +9940,10 @@ react-docgen@^7.0.0:
loose-envify "^1.1.0"
scheduler "^0.23.2"
react-hook-form@^7.54.0:
version "7.54.1"
resolved "https://registry.yarnpkg.com/react-hook-form/-/react-hook-form-7.54.1.tgz#e99c2a55a5e4859fb47a8f55adf66b34d6ac331d"
integrity sha512-PUNzFwQeQ5oHiiTUO7GO/EJXGEtuun2Y1A59rLnZBBj+vNEOWt/3ERTiG1/zt7dVeJEM+4vDX/7XQ/qanuvPMg==
react-hook-form@^7.54.2:
version "7.54.2"
resolved "https://registry.yarnpkg.com/react-hook-form/-/react-hook-form-7.54.2.tgz#8c26ed54c71628dff57ccd3c074b1dd377cfb211"
integrity sha512-eHpAUgUjWbZocoQYUHposymRb4ZP6d0uwUnooL2uOybA9/3tPUvoAKqEWK1WaSiTxxOfTpffNZP7QwlnM3/gEg==
react-icons@^5.4.0:
version "5.4.0"
@ -9962,10 +9970,10 @@ react-lifecycles-compat@^3.0.0:
resolved "https://registry.yarnpkg.com/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz#4f1a273afdfc8f3488a8c516bfda78f872352362"
integrity sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==
react-markdown@^9.0.1:
version "9.0.1"
resolved "https://registry.yarnpkg.com/react-markdown/-/react-markdown-9.0.1.tgz#c05ddbff67fd3b3f839f8c648e6fb35d022397d1"
integrity sha512-186Gw/vF1uRkydbsOIkcGXw7aHq0sZOCRFFjGrr7b9+nVZg4UfA4enXCaxm4fUzecU38sWfrNDitGhshuU7rdg==
react-markdown@^9.0.3:
version "9.0.3"
resolved "https://registry.yarnpkg.com/react-markdown/-/react-markdown-9.0.3.tgz#c12bf60dad05e9bf650b86bcc612d80636e8456e"
integrity sha512-Yk7Z94dbgYTOrdk41Z74GoKA7rThnsbbqBTRYuxoe08qvfQ9tJVhmAKw6BJS/ZORG7kTy/s1QvYzSuaoBA1qfw==
dependencies:
"@types/hast" "^3.0.0"
devlop "^1.0.0"
@ -9978,10 +9986,10 @@ react-markdown@^9.0.1:
unist-util-visit "^5.0.0"
vfile "^6.0.0"
react-modal@^3.16.1:
version "3.16.1"
resolved "https://registry.yarnpkg.com/react-modal/-/react-modal-3.16.1.tgz#34018528fc206561b1a5467fc3beeaddafb39b2b"
integrity sha512-VStHgI3BVcGo7OXczvnJN7yT2TWHJPDXZWyI/a0ssFNhGZWsPmB8cF0z33ewDXq4VfYMO1vXgiv/g8Nj9NDyWg==
react-modal@^3.16.3:
version "3.16.3"
resolved "https://registry.yarnpkg.com/react-modal/-/react-modal-3.16.3.tgz#c412d41915782e3c261253435d01468e2439b11b"
integrity sha512-yCYRJB5YkeQDQlTt17WGAgFJ7jr2QYcWa1SHqZ3PluDmnKJ/7+tVU+E6uKyZ0nODaeEj+xCpK4LcSnKXLMC0Nw==
dependencies:
exenv "^1.2.0"
prop-types "^15.7.2"
@ -11035,10 +11043,10 @@ supports-preserve-symlinks-flag@^1.0.0:
resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
tailwind-merge@^2.5.5:
version "2.5.5"
resolved "https://registry.yarnpkg.com/tailwind-merge/-/tailwind-merge-2.5.5.tgz#98167859b856a2a6b8d2baf038ee171b9d814e39"
integrity sha512-0LXunzzAZzo0tEPxV3I297ffKZPlKDrjj7NXphC8V5ak9yHC5zRmxnOe2m/Rd/7ivsOMJe3JZ2JVocoDdQTRBA==
tailwind-merge@^2.6.0:
version "2.6.0"
resolved "https://registry.yarnpkg.com/tailwind-merge/-/tailwind-merge-2.6.0.tgz#ac5fb7e227910c038d458f396b7400d93a3142d5"
integrity sha512-P+Vu1qXfzediirmHOC3xKGAYeZtPcV9g76X+xg2FD4tYgR71ewMA35Y3sCz3zhiN/dwefRpJX0yBcgwi1fXNQA==
tailwindcss-animate@^1.0.7:
version "1.0.7"
@ -11553,10 +11561,10 @@ utila@~0.4:
resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c"
integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA==
uuid@^11.0.3:
version "11.0.3"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-11.0.3.tgz#248451cac9d1a4a4128033e765d137e2b2c49a3d"
integrity sha512-d0z310fCWv5dJwnX1Y/MncBAqGMKEzlBb1AOf7z9K8ALnd0utBX/msg/fA0+sbyN1ihbMsLhrBlnl1ak7Wa0rg==
uuid@^11.0.4:
version "11.0.4"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-11.0.4.tgz#37943977894ef806d2919a7ca3f89d6e23c60bac"
integrity sha512-IzL6VtTTYcAhA/oghbFJ1Dkmqev+FpQWnCBaKq/gUluLxliWvO8DPFWfIviRmYbtaavtSQe4WBL++rFjdcGWEg==
uuid@^8.0.0, uuid@^8.3.2:
version "8.3.2"

View File

@ -1,12 +0,0 @@
DB_USER=postgres
DB_PASS=your-super-secret-and-long-postgres-password
DB_NAME=postgres
DB_PORT=5432
DATABASE_URL="postgresql://${DB_USER}:${DB_PASS}@localhost:${DB_PORT}/${DB_NAME}?connect_timeout=60&schema=market"
SENTRY_DSN=https://11d0640fef35640e0eb9f022eb7d7626@o4505260022104064.ingest.us.sentry.io/4507890252447744
ENABLE_AUTH=true
SUPABASE_JWT_SECRET=our-super-secret-jwt-token-with-at-least-32-characters-long
BACKEND_CORS_ALLOW_ORIGINS="http://localhost:3000,http://127.0.0.1:3000"
APP_ENV=local

View File

@ -1,6 +0,0 @@
database.db
database.db-journal
build/
config.json
secrets/*
!secrets/.gitkeep

View File

@ -1,75 +0,0 @@
FROM python:3.11.10-slim-bookworm AS builder
# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
WORKDIR /app
RUN echo 'Acquire::http::Pipeline-Depth 0;\nAcquire::http::No-Cache true;\nAcquire::BrokenProxy true;\n' > /etc/apt/apt.conf.d/99fixbadproxy
RUN apt-get update --allow-releaseinfo-change --fix-missing
# Install build dependencies
RUN apt-get install -y build-essential
RUN apt-get install -y libpq5
RUN apt-get install -y libz-dev
RUN apt-get install -y libssl-dev
ENV POETRY_HOME="/opt/poetry" \
POETRY_NO_INTERACTION=1 \
POETRY_VIRTUALENVS_CREATE=false
ENV PATH="$POETRY_HOME/bin:$PATH"
# Upgrade pip and setuptools to fix security vulnerabilities
RUN pip3 install --upgrade pip setuptools
RUN pip3 install poetry
# Copy and install dependencies
COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
COPY autogpt_platform/market/poetry.lock autogpt_platform/market/pyproject.toml /app/autogpt_platform/market/
WORKDIR /app/autogpt_platform/market
RUN poetry install --no-ansi --no-root
# Generate Prisma client
COPY autogpt_platform/market /app/autogpt_platform/market
RUN poetry install --no-ansi && \
poetry run prisma generate
FROM python:3.11.10-slim-bookworm AS server_dependencies
WORKDIR /app
ENV POETRY_HOME="/opt/poetry" \
POETRY_NO_INTERACTION=1 \
POETRY_VIRTUALENVS_CREATE=false
ENV PATH="$POETRY_HOME/bin:$PATH"
# Upgrade pip and setuptools to fix security vulnerabilities
RUN pip3 install --upgrade pip setuptools
# Copy only necessary files from builder
COPY --from=builder /app /app
COPY --from=builder /usr/local/lib/python3.11 /usr/local/lib/python3.11
COPY --from=builder /usr/local/bin /usr/local/bin
# Copy Prisma binaries
COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries
ENV PATH="/app/.venv/bin:$PATH"
RUN mkdir -p /app/autogpt_platform/autogpt_libs
RUN mkdir -p /app/autogpt_platform/market
COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
COPY autogpt_platform/market /app/autogpt_platform/market
WORKDIR /app/autogpt_platform/market
FROM server_dependencies AS server
ENV DATABASE_URL=""
ENV PORT=8015
CMD ["poetry", "run", "app"]

View File

@ -1,37 +0,0 @@
# AutoGPT Agent Marketplace
## Overview
AutoGPT Agent Marketplace is an open-source platform for autonomous AI agents. This project aims to create a user-friendly, accessible marketplace where users can discover, utilize, and contribute to a diverse ecosystem of AI solutions.
## Vision
Our vision is to empower users with customizable and free AI agents, fostering an open-source community that drives innovation in AI automation across various industries.
## Key Features
- Agent Discovery and Search
- Agent Listings with Detailed Information
- User Profiles
- Data Protection and Compliance
## Getting Started
To get started with the AutoGPT Agent Marketplace, follow these steps:
- Copy `.env.example` to `.env` and fill in the required environment variables
- Run `poetry run setup`
- Run `poetry run populate`
- Run `poetry run app`
## Poetry Run Commands
This section outlines the command-line scripts available for this project, configured and executed via Poetry. Each command performs a specific operation as described below:
- `poetry run format`: Runs the formatting script to ensure code consistency.
- `poetry run lint`: Executes the linting script to identify and fix potential code issues.
- `poetry run app`: Starts the main application.
- `poetry run setup`: Runs the setup script to configure the database.
- `poetry run populate`: Populates the database with initial data using the specified script.
To run any of these commands, ensure Poetry is installed on your system and execute the commands from the project's root directory.
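Once `poetry run app` is up, a quick way to confirm the service is responding is to probe its health endpoint. This is a minimal sketch only; the port (8015, taken from the service Dockerfile) and the un-prefixed `/health` path are assumptions and may differ behind a reverse proxy.
import urllib.request

# Probe the Marketplace API health endpoint; 8015 mirrors the Dockerfile's default PORT,
# and the bare /health path assumes the app is reached directly rather than via a proxy.
with urllib.request.urlopen("http://localhost:8015/health") as response:
    print(response.status, response.read().decode())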

View File

@ -1,16 +0,0 @@
version: "3"
services:
postgres:
image: ankane/pgvector:latest
environment:
POSTGRES_USER: ${DB_USER}
POSTGRES_PASSWORD: ${DB_PASS}
POSTGRES_DB: ${DB_NAME}
PGUSER: ${DB_USER}
healthcheck:
test: pg_isready -U $$POSTGRES_USER -d $$POSTGRES_DB
interval: 10s
timeout: 5s
retries: 5
ports:
- "${DB_PORT}:5432"

View File

@ -1,97 +0,0 @@
import contextlib
import logging.config
import os
import dotenv
import fastapi
import fastapi.middleware.cors
import fastapi.middleware.gzip
import prisma
import prometheus_fastapi_instrumentator
import sentry_sdk
import sentry_sdk.integrations.asyncio
import sentry_sdk.integrations.fastapi
import sentry_sdk.integrations.starlette
import market.config
import market.routes.admin
import market.routes.agents
import market.routes.analytics
import market.routes.search
import market.routes.submissions
dotenv.load_dotenv()
logging.config.dictConfig(market.config.LogConfig().model_dump())
if os.environ.get("SENTRY_DSN"):
sentry_sdk.init(
dsn=os.environ.get("SENTRY_DSN"),
traces_sample_rate=1.0,
profiles_sample_rate=1.0,
enable_tracing=True,
environment=os.environ.get("RUN_ENV", default="CLOUD").lower(),
integrations=[
sentry_sdk.integrations.starlette.StarletteIntegration(
transaction_style="url"
),
sentry_sdk.integrations.fastapi.FastApiIntegration(transaction_style="url"),
sentry_sdk.integrations.asyncio.AsyncioIntegration(),
],
)
db_client = prisma.Prisma(auto_register=True)
@contextlib.asynccontextmanager
async def lifespan(app: fastapi.FastAPI):
await db_client.connect()
yield
await db_client.disconnect()
docs_url = "/docs"
app = fastapi.FastAPI(
title="Marketplace API",
description="AutoGPT Marketplace API is a service that allows users to share AI agents.",
summary="Maketplace API",
version="0.1",
lifespan=lifespan,
root_path="/api/v1/market",
docs_url=docs_url,
)
app.add_middleware(fastapi.middleware.gzip.GZipMiddleware, minimum_size=1000)
app.add_middleware(
middleware_class=fastapi.middleware.cors.CORSMiddleware,
allow_origins=os.environ.get(
"BACKEND_CORS_ALLOW_ORIGINS", "http://localhost:3000,http://127.0.0.1:3000"
).split(","),
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(market.routes.agents.router, tags=["agents"])
app.include_router(market.routes.search.router, tags=["search"])
app.include_router(market.routes.submissions.router, tags=["submissions"])
app.include_router(market.routes.admin.router, prefix="/admin", tags=["admin"])
app.include_router(
market.routes.analytics.router, prefix="/analytics", tags=["analytics"]
)
@app.get("/health")
def health():
return fastapi.responses.HTMLResponse(
content="<h1>Marketplace API</h1>", status_code=200
)
@app.get("/")
def default():
return fastapi.responses.HTMLResponse(
content="<h1>Marketplace API</h1>", status_code=200
)
prometheus_fastapi_instrumentator.Instrumentator().instrument(app).expose(app)
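For local development outside Docker, an app wired up like this can be served with uvicorn; the snippet below is a sketch, and the import string "market.app:app" plus port 8015 are assumptions drawn from the project layout and Dockerfile rather than from this file.
import uvicorn

# Serve the FastAPI app defined above; reload=True is only appropriate for local development.
uvicorn.run("market.app:app", host="0.0.0.0", port=8015, reload=True)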

View File

@ -1,30 +0,0 @@
from pydantic import BaseModel
class LogConfig(BaseModel):
"""Logging configuration to be set for the server"""
LOGGER_NAME: str = "marketplace"
LOG_FORMAT: str = "%(levelprefix)s | %(asctime)s | %(message)s"
LOG_LEVEL: str = "DEBUG"
# Logging config
version: int = 1
disable_existing_loggers: bool = False
formatters: dict = {
"default": {
"()": "uvicorn.logging.DefaultFormatter",
"fmt": LOG_FORMAT,
"datefmt": "%Y-%m-%d %H:%M:%S",
},
}
handlers: dict = {
"default": {
"formatter": "default",
"class": "logging.StreamHandler",
"stream": "ext://sys.stderr",
},
}
loggers: dict = {
LOGGER_NAME: {"handlers": ["default"], "level": LOG_LEVEL},
}

View File

@ -1,725 +0,0 @@
import datetime
import typing
import fuzzywuzzy.fuzz
import prisma.enums
import prisma.errors
import prisma.models
import prisma.types
import pydantic
import market.model
import market.utils.extension_types
class AgentQueryError(Exception):
"""Custom exception for agent query errors"""
pass
class TopAgentsDBResponse(pydantic.BaseModel):
"""
Represents a response containing a list of top agents.
Attributes:
analytics (list[prisma.models.AnalyticsTracker]): The analytics records for the top agents.
total_count (int): The total count of agents.
page (int): The current page number.
page_size (int): The number of agents per page.
total_pages (int): The total number of pages.
"""
analytics: list[prisma.models.AnalyticsTracker]
total_count: int
page: int
page_size: int
total_pages: int
class FeaturedAgentResponse(pydantic.BaseModel):
"""
Represents a response containing a list of featured agents.
Attributes:
featured_agents (list[FeaturedAgent]): The list of featured agents.
total_count (int): The total count of featured agents.
page (int): The current page number.
page_size (int): The number of agents per page.
total_pages (int): The total number of pages.
"""
featured_agents: list[prisma.models.FeaturedAgent]
total_count: int
page: int
page_size: int
total_pages: int
async def delete_agent(agent_id: str) -> prisma.models.Agents | None:
"""
Delete an agent from the database.
Args:
agent_id (str): The ID of the agent to delete.
Returns:
prisma.models.Agents | None: The deleted agent if found, None otherwise.
Raises:
AgentQueryError: If there is an error deleting the agent from the database.
"""
try:
deleted_agent = await prisma.models.Agents.prisma().delete(
where={"id": agent_id}
)
return deleted_agent
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
async def create_agent_entry(
name: str,
description: str,
author: str,
keywords: typing.List[str],
categories: typing.List[str],
graph: prisma.Json,
submission_state: prisma.enums.SubmissionStatus = prisma.enums.SubmissionStatus.PENDING,
):
"""
Create a new agent entry in the database.
Args:
name (str): The name of the agent.
description (str): The description of the agent.
author (str): The author of the agent.
keywords (List[str]): The keywords associated with the agent.
categories (List[str]): The categories associated with the agent.
graph (dict): The graph data of the agent.
Returns:
dict: The newly created agent entry.
Raises:
AgentQueryError: If there is an error creating the agent entry.
"""
try:
agent = await prisma.models.Agents.prisma().create(
data={
"name": name,
"description": description,
"author": author,
"keywords": keywords,
"categories": categories,
"graph": graph,
"AnalyticsTracker": {"create": {"downloads": 0, "views": 0}},
"submissionStatus": submission_state,
}
)
return agent
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
async def update_agent_entry(
agent_id: str,
version: int,
submission_state: prisma.enums.SubmissionStatus,
comments: str | None = None,
) -> prisma.models.Agents | None:
"""
Update an existing agent entry in the database.
Args:
agent_id (str): The ID of the agent.
version (int): The version of the agent.
submission_state (prisma.enums.SubmissionStatus): The submission state of the agent.
comments (str | None, optional): Reviewer comments to store with the submission. Defaults to None.
"""
try:
agent = await prisma.models.Agents.prisma().update(
where={"id": agent_id},
data={
"version": version,
"submissionStatus": submission_state,
"submissionReviewDate": datetime.datetime.now(datetime.timezone.utc),
"submissionReviewComments": comments,
},
)
return agent
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Agent Update Failed Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
async def get_agents(
page: int = 1,
page_size: int = 10,
name: str | None = None,
keyword: str | None = None,
category: str | None = None,
description: str | None = None,
description_threshold: int = 60,
submission_status: prisma.enums.SubmissionStatus = prisma.enums.SubmissionStatus.APPROVED,
sort_by: str = "createdAt",
sort_order: typing.Literal["desc"] | typing.Literal["asc"] = "desc",
):
"""
Retrieve a list of agents from the database based on the provided filters and pagination parameters.
Args:
page (int, optional): The page number to retrieve. Defaults to 1.
page_size (int, optional): The number of agents per page. Defaults to 10.
name (str, optional): Filter agents by name. Defaults to None.
keyword (str, optional): Filter agents by keyword. Defaults to None.
category (str, optional): Filter agents by category. Defaults to None.
description (str, optional): Filter agents by description. Defaults to None.
description_threshold (int, optional): The minimum fuzzy search threshold for the description. Defaults to 60.
sort_by (str, optional): The field to sort the agents by. Defaults to "createdAt".
sort_order (str, optional): The sort order ("asc" or "desc"). Defaults to "desc".
Returns:
dict: A dictionary containing the list of agents, total count, current page number, page size, and total number of pages.
"""
try:
# Define the base query
query = {}
# Add optional filters
if name:
query["name"] = {"contains": name, "mode": "insensitive"}
if keyword:
query["keywords"] = {"has": keyword}
if category:
query["categories"] = {"has": category}
query["submissionStatus"] = submission_status
# Define sorting
order = {sort_by: sort_order}
# Calculate pagination
skip = (page - 1) * page_size
# Execute the query
try:
agents = await prisma.models.Agents.prisma().find_many(
where=query, # type: ignore
order=order, # type: ignore
skip=skip,
take=page_size,
)
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
# Apply fuzzy search on description if provided
if description:
try:
filtered_agents = []
for agent in agents:
if (
agent.description
and fuzzywuzzy.fuzz.partial_ratio(
description.lower(), agent.description.lower()
)
>= description_threshold
):
filtered_agents.append(agent)
agents = filtered_agents
except AttributeError as e:
raise AgentQueryError(f"Error during fuzzy search: {str(e)}")
# Get total count for pagination info
total_count = len(agents)
return {
"agents": agents,
"total_count": total_count,
"page": page,
"page_size": page_size,
"total_pages": (total_count + page_size - 1) // page_size,
}
except AgentQueryError as e:
# Log the error or handle it as needed
raise e
except ValueError as e:
raise AgentQueryError(f"Invalid input parameter: {str(e)}")
except Exception as e:
# Catch any other unexpected exceptions
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
async def get_agent_details(agent_id: str, version: int | None = None):
"""
Retrieve agent details from the database.
Args:
agent_id (str): The ID of the agent.
version (int | None, optional): The version of the agent. Defaults to None.
Returns:
dict: The agent details.
Raises:
AgentQueryError: If the agent is not found or if there is an error querying the database.
"""
try:
query = {"id": agent_id}
if version is not None:
query["version"] = version # type: ignore
agent = await prisma.models.Agents.prisma().find_first(where=query) # type: ignore
if not agent:
raise AgentQueryError("Agent not found")
return agent
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
async def search_db(
query: str,
page: int = 1,
page_size: int = 10,
categories: typing.List[str] | None = None,
description_threshold: int = 60,
sort_by: str = "rank",
sort_order: typing.Literal["desc"] | typing.Literal["asc"] = "desc",
submission_status: prisma.enums.SubmissionStatus = prisma.enums.SubmissionStatus.APPROVED,
) -> market.model.ListResponse[market.utils.extension_types.AgentsWithRank]:
"""Perform a search for agents based on the provided query string.
Args:
query (str): the search string
page (int, optional): page for searching. Defaults to 1.
page_size (int, optional): the number of results to return. Defaults to 10.
categories (List[str] | None, optional): list of category filters. Defaults to None.
description_threshold (int, optional): number of characters to return. Defaults to 60.
sort_by (str, optional): sort by option. Defaults to "rank".
sort_order ("asc" | "desc", optional): the sort order. Defaults to "desc".
Raises:
AgentQueryError: Raises an error if the query fails.
AgentQueryError: Raises if an unexpected error occurs.
Returns:
market.model.ListResponse[AgentsWithRank]: Paginated list of agents matching the search criteria.
"""
try:
offset = (page - 1) * page_size
category_filter = "1=1"
if categories:
category_conditions = [f"'{cat}' = ANY(categories)" for cat in categories]
category_filter = "AND (" + " OR ".join(category_conditions) + ")"
# Construct the ORDER BY clause based on the sort_by parameter
if sort_by in ["createdAt", "updatedAt"]:
order_by_clause = f'"{sort_by}" {sort_order.upper()}, rank DESC'
elif sort_by == "name":
order_by_clause = f"name {sort_order.upper()}, rank DESC"
else:
order_by_clause = 'rank DESC, "createdAt" DESC'
submission_status_filter = f""""submissionStatus" = '{submission_status}'"""
sql_query = f"""
WITH query AS (
SELECT to_tsquery(string_agg(lexeme || ':*', ' & ' ORDER BY positions)) AS q
FROM unnest(to_tsvector('{query}'))
)
SELECT
id,
"createdAt",
"updatedAt",
version,
name,
LEFT(description, {description_threshold}) AS description,
author,
keywords,
categories,
graph,
"submissionStatus",
"submissionDate",
CASE
WHEN query.q::text = '' THEN 1.0
ELSE COALESCE(ts_rank(CAST(search AS tsvector), query.q), 0.0)
END AS rank
FROM market."Agents", query
WHERE
(query.q::text = '' OR search @@ query.q)
AND {category_filter}
AND {submission_status_filter}
ORDER BY {order_by_clause}
LIMIT {page_size}
OFFSET {offset};
"""
results = await prisma.client.get_client().query_raw(
query=sql_query,
model=market.utils.extension_types.AgentsWithRank,
)
class CountResponse(pydantic.BaseModel):
count: int
count_query = f"""
WITH query AS (
SELECT to_tsquery(string_agg(lexeme || ':*', ' & ' ORDER BY positions)) AS q
FROM unnest(to_tsvector('{query}'))
)
SELECT COUNT(*)
FROM market."Agents", query
WHERE (search @@ query.q OR query.q = '') AND {category_filter} AND {submission_status_filter};
"""
total_count = await prisma.client.get_client().query_first(
query=count_query,
model=CountResponse,
)
total_count = total_count.count if total_count else 0
return market.model.ListResponse(
items=results,
total_count=total_count,
page=page,
page_size=page_size,
total_pages=(total_count + page_size - 1) // page_size,
)
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
async def get_top_agents_by_downloads(
page: int = 1,
page_size: int = 10,
submission_status: prisma.enums.SubmissionStatus = prisma.enums.SubmissionStatus.APPROVED,
) -> market.model.ListResponse[prisma.models.AnalyticsTracker]:
"""Retrieve the top agents by download count.
Args:
page (int, optional): The page number. Defaults to 1.
page_size (int, optional): The number of agents per page. Defaults to 10.
Returns:
market.model.ListResponse[prisma.models.AnalyticsTracker]: A paginated list of analytics records for the top agents.
"""
try:
# Calculate pagination
skip = (page - 1) * page_size
# Execute the query
try:
# Agents with no downloads will not be included in the results... is this the desired behavior?
analytics = await prisma.models.AnalyticsTracker.prisma().find_many(
include={"agent": True},
order={"downloads": "desc"},
where={"agent": {"is": {"submissionStatus": submission_status}}},
skip=skip,
take=page_size,
)
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
try:
total_count = await prisma.models.AnalyticsTracker.prisma().count(
where={"agent": {"is": {"submissionStatus": submission_status}}},
)
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
return market.model.ListResponse(
items=analytics,
total_count=total_count,
page=page,
page_size=page_size,
total_pages=(total_count + page_size - 1) // page_size,
)
except AgentQueryError as e:
# Log the error or handle it as needed
raise e from e
except ValueError as e:
raise AgentQueryError(f"Invalid input parameter: {str(e)}") from e
except Exception as e:
# Catch any other unexpected exceptions
raise AgentQueryError(f"Unexpected error occurred: {str(e)}") from e
async def set_agent_featured(
agent_id: str, is_active: bool = True, featured_categories: list[str] = ["featured"]
) -> prisma.models.FeaturedAgent:
"""Set an agent as featured in the database.
Args:
agent_id (str): The ID of the agent.
is_active (bool, optional): Whether the featured entry is active. Defaults to True.
featured_categories (list[str], optional): The categories to feature the agent in. Defaults to ["featured"].
Raises:
AgentQueryError: If there is an error setting the agent as featured.
"""
try:
agent = await prisma.models.Agents.prisma().find_unique(where={"id": agent_id})
if not agent:
raise AgentQueryError(f"Agent with ID {agent_id} not found.")
featured = await prisma.models.FeaturedAgent.prisma().upsert(
where={"agentId": agent_id},
data={
"update": {
"featuredCategories": featured_categories,
"isActive": is_active,
},
"create": {
"featuredCategories": featured_categories,
"isActive": is_active,
"agent": {"connect": {"id": agent_id}},
},
},
)
return featured
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
async def get_featured_agents(
category: str = "featured",
page: int = 1,
page_size: int = 10,
submission_status: prisma.enums.SubmissionStatus = prisma.enums.SubmissionStatus.APPROVED,
) -> FeaturedAgentResponse:
"""Retrieve a list of featured agents from the database based on the provided category.
Args:
category (str, optional): The category of featured agents to retrieve. Defaults to "featured".
page (int, optional): The page number to retrieve. Defaults to 1.
page_size (int, optional): The number of agents per page. Defaults to 10.
Returns:
FeaturedAgentResponse: The list of featured agents with pagination information.
"""
try:
# Calculate pagination
skip = (page - 1) * page_size
# Execute the query
try:
featured_agents = await prisma.models.FeaturedAgent.prisma().find_many(
where={
"featuredCategories": {"has": category},
"isActive": True,
"agent": {"is": {"submissionStatus": submission_status}},
},
include={"agent": {"include": {"AnalyticsTracker": True}}},
skip=skip,
take=page_size,
)
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
# Get total count for pagination info
total_count = len(featured_agents)
return FeaturedAgentResponse(
featured_agents=featured_agents,
total_count=total_count,
page=page,
page_size=page_size,
total_pages=(total_count + page_size - 1) // page_size,
)
except AgentQueryError as e:
# Log the error or handle it as needed
raise e from e
except ValueError as e:
raise AgentQueryError(f"Invalid input parameter: {str(e)}") from e
except Exception as e:
# Catch any other unexpected exceptions
raise AgentQueryError(f"Unexpected error occurred: {str(e)}") from e
async def remove_featured_category(
agent_id: str, category: str
) -> prisma.models.FeaturedAgent | None:
"""Adds a featured category to an agent.
Args:
agent_id (str): The ID of the agent.
category (str): The category to add to the agent.
Returns:
FeaturedAgentResponse: The updated list of featured agents.
"""
try:
# get the existing categories
featured_agent = await prisma.models.FeaturedAgent.prisma().find_unique(
where={"agentId": agent_id},
include={"agent": True},
)
if not featured_agent:
raise AgentQueryError(f"Agent with ID {agent_id} not found.")
# remove the category from the list
featured_agent.featuredCategories.remove(category)
featured_agent = await prisma.models.FeaturedAgent.prisma().update(
where={"agentId": agent_id},
data={"featuredCategories": featured_agent.featuredCategories},
)
return featured_agent
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
async def add_featured_category(
agent_id: str, category: str
) -> prisma.models.FeaturedAgent | None:
"""Removes a featured category from an agent.
Args:
agent_id (str): The ID of the agent.
category (str): The category to remove from the agent.
Returns:
FeaturedAgentResponse: The updated list of featured agents.
"""
try:
featured_agent = await prisma.models.FeaturedAgent.prisma().update(
where={"agentId": agent_id},
data={"featuredCategories": {"push": [category]}},
)
return featured_agent
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
async def get_agent_featured(agent_id: str) -> prisma.models.FeaturedAgent | None:
"""Retrieve an agent's featured categories from the database.
Args:
agent_id (str): The ID of the agent.
Returns:
prisma.models.FeaturedAgent | None: The agent's featured record, if any.
"""
try:
featured_agent = await prisma.models.FeaturedAgent.prisma().find_unique(
where={"agentId": agent_id},
)
return featured_agent
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
async def get_not_featured_agents(
page: int = 1, page_size: int = 10
) -> typing.List[prisma.models.Agents]:
"""
Retrieve a list of not featured agents from the database.
"""
try:
agents = await prisma.client.get_client().query_raw(
query=f"""
SELECT
"market"."Agents".id,
"market"."Agents"."createdAt",
"market"."Agents"."updatedAt",
"market"."Agents".version,
"market"."Agents".name,
LEFT("market"."Agents".description, 500) AS description,
"market"."Agents".author,
"market"."Agents".keywords,
"market"."Agents".categories,
"market"."Agents".graph,
"market"."Agents"."submissionStatus",
"market"."Agents"."submissionDate",
"market"."Agents".search::text AS search
FROM "market"."Agents"
LEFT JOIN "market"."FeaturedAgent" ON "market"."Agents"."id" = "market"."FeaturedAgent"."agentId"
WHERE ("market"."FeaturedAgent"."agentId" IS NULL OR "market"."FeaturedAgent"."featuredCategories" = '{{}}')
AND "market"."Agents"."submissionStatus" = 'APPROVED'
ORDER BY "market"."Agents"."createdAt" DESC
LIMIT {page_size} OFFSET {page_size * (page - 1)}
""",
model=prisma.models.Agents,
)
return agents
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
async def get_all_categories() -> market.model.CategoriesResponse:
"""
Retrieve all unique categories from the database.
Returns:
CategoriesResponse: A list of unique categories.
"""
try:
agents = await prisma.models.Agents.prisma().find_many(distinct=["categories"])
# Aggregate categories on the Python side
all_categories = set()
for agent in agents:
all_categories.update(agent.categories)
unique_categories = sorted(list(all_categories))
return market.model.CategoriesResponse(unique_categories=unique_categories)
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception:
# Return an empty list of categories in case of unexpected errors
return market.model.CategoriesResponse(unique_categories=[])
async def create_agent_installed_event(
event_data: market.model.AgentInstalledFromMarketplaceEventData,
):
try:
await prisma.models.InstallTracker.prisma().create(
data={
"installedAgentId": event_data.installed_agent_id,
"marketplaceAgentId": event_data.marketplace_agent_id,
"installationLocation": prisma.enums.InstallationLocation(
event_data.installation_location.name
),
}
)
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")

View File

@ -1,161 +0,0 @@
import datetime
import typing
from enum import Enum
from typing import Generic, Literal, TypeVar, Union
import prisma.enums
import pydantic
class InstallationLocation(str, Enum):
LOCAL = "local"
CLOUD = "cloud"
class AgentInstalledFromMarketplaceEventData(pydantic.BaseModel):
marketplace_agent_id: str
installed_agent_id: str
installation_location: InstallationLocation
class AgentInstalledFromTemplateEventData(pydantic.BaseModel):
template_id: str
installed_agent_id: str
installation_location: InstallationLocation
class AgentInstalledFromMarketplaceEvent(pydantic.BaseModel):
event_name: Literal["agent_installed_from_marketplace"]
event_data: AgentInstalledFromMarketplaceEventData
class AgentInstalledFromTemplateEvent(pydantic.BaseModel):
event_name: Literal["agent_installed_from_template"]
event_data: AgentInstalledFromTemplateEventData
AnalyticsEvent = Union[
AgentInstalledFromMarketplaceEvent, AgentInstalledFromTemplateEvent
]
class AnalyticsRequest(pydantic.BaseModel):
event: AnalyticsEvent
class AddAgentRequest(pydantic.BaseModel):
graph: dict[str, typing.Any]
author: str
keywords: list[str]
categories: list[str]
class SubmissionReviewRequest(pydantic.BaseModel):
agent_id: str
version: int
status: prisma.enums.SubmissionStatus
comments: str | None
class AgentResponse(pydantic.BaseModel):
"""
Represents a response from an agent.
Attributes:
id (str): The ID of the agent.
name (str, optional): The name of the agent.
description (str, optional): The description of the agent.
author (str, optional): The author of the agent.
keywords (list[str]): The keywords associated with the agent.
categories (list[str]): The categories the agent belongs to.
version (int): The version of the agent.
createdAt (str): The creation date of the agent.
updatedAt (str): The last update date of the agent.
"""
id: str
name: typing.Optional[str]
description: typing.Optional[str]
author: typing.Optional[str]
keywords: list[str]
categories: list[str]
version: int
createdAt: datetime.datetime
updatedAt: datetime.datetime
submissionStatus: str
views: int = 0
downloads: int = 0
class AgentDetailResponse(pydantic.BaseModel):
"""
Represents the response data for an agent detail.
Attributes:
id (str): The ID of the agent.
name (Optional[str]): The name of the agent.
description (Optional[str]): The description of the agent.
author (Optional[str]): The author of the agent.
keywords (List[str]): The keywords associated with the agent.
categories (List[str]): The categories the agent belongs to.
version (int): The version of the agent.
createdAt (str): The creation date of the agent.
updatedAt (str): The last update date of the agent.
graph (Dict[str, Any]): The graph data of the agent.
"""
id: str
name: typing.Optional[str]
description: typing.Optional[str]
author: typing.Optional[str]
keywords: list[str]
categories: list[str]
version: int
createdAt: datetime.datetime
updatedAt: datetime.datetime
graph: dict[str, typing.Any]
class FeaturedAgentResponse(pydantic.BaseModel):
"""
Represents the response data for a featured agent.
"""
agentId: str
featuredCategories: list[str]
createdAt: datetime.datetime
updatedAt: datetime.datetime
isActive: bool
class CategoriesResponse(pydantic.BaseModel):
"""
Represents the response data for a list of categories.
Attributes:
unique_categories (list[str]): The list of unique categories.
"""
unique_categories: list[str]
T = TypeVar("T")
class ListResponse(pydantic.BaseModel, Generic[T]):
"""
Represents a list response.
Attributes:
items (list[T]): The list of items.
total_count (int): The total count of items.
page (int): The current page number.
page_size (int): The number of items per page.
total_pages (int): The total number of pages.
"""
items: list[T]
total_count: int
page: int
page_size: int
total_pages: int
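As a usage illustration (not part of the original module), the generic container can be instantiated like this, mirroring how market/db.py builds paginated responses.
from market.model import AgentResponse, ListResponse

# An empty first page of agents; callers in market/db.py compute total_pages themselves.
empty_page = ListResponse[AgentResponse](
    items=[], total_count=0, page=1, page_size=10, total_pages=0
)
print(empty_page.total_count)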

View File

@ -1,286 +0,0 @@
import logging
import typing
import autogpt_libs.auth
import fastapi
import prisma
import prisma.enums
import prisma.models
import market.db
import market.model
logger = logging.getLogger("marketplace")
router = fastapi.APIRouter()
@router.delete("/agent/{agent_id}", response_model=market.model.AgentResponse)
async def delete_agent(
agent_id: str,
user: autogpt_libs.auth.User = fastapi.Depends(
autogpt_libs.auth.requires_admin_user
),
):
"""
Delete an agent and all related records from the database.
Args:
agent_id (str): The ID of the agent to delete.
Returns:
market.model.AgentResponse: The deleted agent's data.
Raises:
fastapi.HTTPException: If the agent is not found or if there's an error during deletion.
"""
try:
deleted_agent = await market.db.delete_agent(agent_id)
if deleted_agent:
return market.model.AgentResponse(**deleted_agent.dict())
else:
raise fastapi.HTTPException(status_code=404, detail="Agent not found")
except market.db.AgentQueryError as e:
logger.error(f"Error deleting agent: {e}")
raise fastapi.HTTPException(status_code=500, detail=str(e))
except Exception as e:
logger.error(f"Unexpected error deleting agent: {e}")
raise fastapi.HTTPException(
status_code=500, detail="An unexpected error occurred"
)
@router.post("/agent", response_model=market.model.AgentResponse)
async def create_agent_entry(
request: market.model.AddAgentRequest,
user: autogpt_libs.auth.User = fastapi.Depends(
autogpt_libs.auth.requires_admin_user
),
):
"""
A basic endpoint to create a new agent entry in the database.
"""
try:
agent = await market.db.create_agent_entry(
request.graph["name"],
request.graph["description"],
request.author,
request.keywords,
request.categories,
prisma.Json(request.graph),
)
return fastapi.responses.PlainTextResponse(agent.model_dump_json())
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
except Exception as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
@router.post("/agent/featured/{agent_id}")
async def set_agent_featured(
agent_id: str,
categories: list[str] = fastapi.Query(
default=["featured"],
description="The categories to set the agent as featured in",
),
user: autogpt_libs.auth.User = fastapi.Depends(
autogpt_libs.auth.requires_admin_user
),
) -> market.model.FeaturedAgentResponse:
"""
A basic endpoint to set an agent as featured in the database.
"""
try:
agent = await market.db.set_agent_featured(
agent_id, is_active=True, featured_categories=categories
)
return market.model.FeaturedAgentResponse(**agent.model_dump())
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
except Exception as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
@router.get("/agent/featured/{agent_id}")
async def get_agent_featured(
agent_id: str,
user: autogpt_libs.auth.User = fastapi.Depends(
autogpt_libs.auth.requires_admin_user
),
) -> market.model.FeaturedAgentResponse | None:
"""
A basic endpoint to get an agent as featured in the database.
"""
try:
agent = await market.db.get_agent_featured(agent_id)
if agent:
return market.model.FeaturedAgentResponse(**agent.model_dump())
else:
return None
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
except Exception as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
@router.delete("/agent/featured/{agent_id}")
async def unset_agent_featured(
agent_id: str,
category: str = "featured",
user: autogpt_libs.auth.User = fastapi.Depends(
autogpt_libs.auth.requires_admin_user
),
) -> market.model.FeaturedAgentResponse | None:
"""
A basic endpoint to unset an agent as featured in the database.
"""
try:
featured = await market.db.remove_featured_category(agent_id, category=category)
if featured:
return market.model.FeaturedAgentResponse(**featured.model_dump())
else:
return None
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
except Exception as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
@router.get("/agent/not-featured")
async def get_not_featured_agents(
page: int = fastapi.Query(1, ge=1, description="Page number"),
page_size: int = fastapi.Query(
10, ge=1, le=100, description="Number of items per page"
),
user: autogpt_libs.auth.User = fastapi.Depends(
autogpt_libs.auth.requires_admin_user
),
) -> market.model.ListResponse[market.model.AgentResponse]:
"""
A basic endpoint to get all not featured agents in the database.
"""
try:
agents = await market.db.get_not_featured_agents(page=page, page_size=page_size)
return market.model.ListResponse(
items=[
market.model.AgentResponse(**agent.model_dump()) for agent in agents
],
total_count=len(agents),
page=page,
page_size=page_size,
total_pages=999,
)
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
except Exception as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
@router.get(
"/agent/submissions",
response_model=market.model.ListResponse[market.model.AgentResponse],
)
async def get_agent_submissions(
page: int = fastapi.Query(1, ge=1, description="Page number"),
page_size: int = fastapi.Query(
10, ge=1, le=100, description="Number of items per page"
),
name: typing.Optional[str] = fastapi.Query(
None, description="Filter by agent name"
),
keyword: typing.Optional[str] = fastapi.Query(
None, description="Filter by keyword"
),
category: typing.Optional[str] = fastapi.Query(
None, description="Filter by category"
),
description: typing.Optional[str] = fastapi.Query(
None, description="Fuzzy search in description"
),
description_threshold: int = fastapi.Query(
60, ge=0, le=100, description="Fuzzy search threshold"
),
sort_by: str = fastapi.Query("createdAt", description="Field to sort by"),
sort_order: typing.Literal["asc", "desc"] = fastapi.Query(
"desc", description="Sort order (asc or desc)"
),
user: autogpt_libs.auth.User = fastapi.Depends(
autogpt_libs.auth.requires_admin_user
),
) -> market.model.ListResponse[market.model.AgentResponse]:
logger.info("Getting agent submissions")
try:
result = await market.db.get_agents(
page=page,
page_size=page_size,
name=name,
keyword=keyword,
category=category,
description=description,
description_threshold=description_threshold,
sort_by=sort_by,
sort_order=sort_order,
submission_status=prisma.enums.SubmissionStatus.PENDING,
)
agents = [
market.model.AgentResponse(**agent.dict()) for agent in result["agents"]
]
return market.model.ListResponse(
items=agents,
total_count=result["total_count"],
page=result["page"],
page_size=result["page_size"],
total_pages=result["total_pages"],
)
except market.db.AgentQueryError as e:
logger.error(f"Error getting agent submissions: {e}")
raise fastapi.HTTPException(status_code=400, detail=str(e))
except Exception as e:
logger.error(f"Error getting agent submissions: {e}")
raise fastapi.HTTPException(
status_code=500, detail=f"An unexpected error occurred: {e}"
)
@router.post("/agent/submissions")
async def review_submission(
review_request: market.model.SubmissionReviewRequest,
user: autogpt_libs.auth.User = fastapi.Depends(
autogpt_libs.auth.requires_admin_user
),
) -> prisma.models.Agents | None:
"""
A basic endpoint to review a submission in the database.
"""
logger.info(
f"Reviewing submission: {review_request.agent_id}, {review_request.version}"
)
try:
agent = await market.db.update_agent_entry(
agent_id=review_request.agent_id,
version=review_request.version,
submission_state=review_request.status,
comments=review_request.comments,
)
return agent
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
except Exception as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
@router.get("/categories")
async def get_categories() -> market.model.CategoriesResponse:
"""
A basic endpoint to get all available categories.
"""
try:
categories = await market.db.get_all_categories()
return categories
except Exception as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))

View File

@ -1,76 +0,0 @@
import datetime
from unittest import mock
import autogpt_libs.auth.middleware
import fastapi
import fastapi.testclient
import prisma.enums
import prisma.models
import market.app
client = fastapi.testclient.TestClient(market.app.app)
async def override_auth_middleware(request: fastapi.Request):
return {"sub": "3e53486c-cf57-477e-ba2a-cb02dc828e1a", "role": "admin"}
market.app.app.dependency_overrides[autogpt_libs.auth.middleware.auth_middleware] = (
override_auth_middleware
)
def test_get_submissions():
with mock.patch("market.db.get_agents") as mock_get_agents:
mock_get_agents.return_value = {
"agents": [],
"total_count": 0,
"page": 1,
"page_size": 10,
"total_pages": 0,
}
response = client.get(
"/api/v1/market/admin/agent/submissions?page=1&page_size=10&description_threshold=60&sort_by=createdAt&sort_order=desc",
headers={"Bearer": ""},
)
assert response.status_code == 200
assert response.json() == {
"agents": [],
"total_count": 0,
"page": 1,
"page_size": 10,
"total_pages": 0,
}
def test_review_submission():
with mock.patch("market.db.update_agent_entry") as mock_update_agent_entry:
mock_update_agent_entry.return_value = prisma.models.Agents(
id="aaa-bbb-ccc",
version=1,
createdAt=datetime.datetime.fromisoformat("2021-10-01T00:00:00+00:00"),
updatedAt=datetime.datetime.fromisoformat("2021-10-01T00:00:00+00:00"),
submissionStatus=prisma.enums.SubmissionStatus.APPROVED,
submissionDate=datetime.datetime.fromisoformat("2021-10-01T00:00:00+00:00"),
submissionReviewComments="Looks good",
submissionReviewDate=datetime.datetime.fromisoformat(
"2021-10-01T00:00:00+00:00"
),
keywords=["test"],
categories=["test"],
graph='{"name": "test", "description": "test"}', # type: ignore
)
response = client.post(
"/api/v1/market/admin/agent/submissions",
headers={
"Authorization": "Bearer token"
}, # Assuming you need an authorization token
json={
"agent_id": "aaa-bbb-ccc",
"version": 1,
"status": "APPROVED",
"comments": "Looks good",
},
)
assert response.status_code == 200

View File

@ -1,368 +0,0 @@
import json
import tempfile
import typing
import fastapi
import fastapi.responses
import prisma
import prisma.enums
import market.db
import market.model
import market.utils.analytics
router = fastapi.APIRouter()
@router.get(
"/agents", response_model=market.model.ListResponse[market.model.AgentResponse]
)
async def list_agents(
page: int = fastapi.Query(1, ge=1, description="Page number"),
page_size: int = fastapi.Query(
10, ge=1, le=100, description="Number of items per page"
),
name: typing.Optional[str] = fastapi.Query(
None, description="Filter by agent name"
),
keyword: typing.Optional[str] = fastapi.Query(
None, description="Filter by keyword"
),
category: typing.Optional[str] = fastapi.Query(
None, description="Filter by category"
),
description: typing.Optional[str] = fastapi.Query(
None, description="Fuzzy search in description"
),
description_threshold: int = fastapi.Query(
60, ge=0, le=100, description="Fuzzy search threshold"
),
sort_by: str = fastapi.Query("createdAt", description="Field to sort by"),
sort_order: typing.Literal["asc", "desc"] = fastapi.Query(
"desc", description="Sort order (asc or desc)"
),
submission_status: prisma.enums.SubmissionStatus = fastapi.Query(
default=prisma.enums.SubmissionStatus.APPROVED,
description="Filter by submission status",
),
):
"""
Retrieve a list of agents based on the provided filters.
Args:
page (int): Page number (default: 1).
page_size (int): Number of items per page (default: 10, min: 1, max: 100).
name (str, optional): Filter by agent name.
keyword (str, optional): Filter by keyword.
category (str, optional): Filter by category.
description (str, optional): Fuzzy search in description.
description_threshold (int): Fuzzy search threshold (default: 60, min: 0, max: 100).
sort_by (str): Field to sort by (default: "createdAt").
sort_order (str): Sort order (asc or desc) (default: "desc").
submission_status (str): Filter by submission status (default: "APPROVED").
Returns:
market.model.ListResponse[market.model.AgentResponse]: A response containing the list of agents and pagination information.
Raises:
HTTPException: If there is a client error (status code 400) or an unexpected error (status code 500).
"""
try:
result = await market.db.get_agents(
page=page,
page_size=page_size,
name=name,
keyword=keyword,
category=category,
description=description,
description_threshold=description_threshold,
sort_by=sort_by,
sort_order=sort_order,
submission_status=submission_status,
)
agents = [
market.model.AgentResponse(**agent.dict()) for agent in result["agents"]
]
return market.model.ListResponse(
items=agents,
total_count=result["total_count"],
page=result["page"],
page_size=result["page_size"],
total_pages=result["total_pages"],
)
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=400, detail=str(e))
except Exception as e:
raise fastapi.HTTPException(
status_code=500, detail=f"An unexpected error occurred: {e}"
)
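A minimal sketch of calling the listing endpoint with its filter and pagination parameters; the parameter names come straight from the signature above, while the base URL is an assumption for illustration.

import httpx

BASE_URL = "http://localhost:8015/api/v1/market"  # assumed mount point

resp = httpx.get(
    f"{BASE_URL}/agents",
    params={
        "page": 1,
        "page_size": 10,
        "category": "productivity",   # optional filter
        "description": "scraper",     # fuzzy-matched against descriptions
        "description_threshold": 60,
        "sort_by": "createdAt",
        "sort_order": "desc",
        "submission_status": "APPROVED",
    },
)
resp.raise_for_status()
data = resp.json()  # ListResponse[AgentResponse]: items, total_count, page, page_size, total_pages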
@router.get("/agents/{agent_id}", response_model=market.model.AgentDetailResponse)
async def get_agent_details_endpoint(
background_tasks: fastapi.BackgroundTasks,
agent_id: str = fastapi.Path(..., description="The ID of the agent to retrieve"),
version: typing.Optional[int] = fastapi.Query(
None, description="Specific version of the agent"
),
):
"""
Retrieve details of a specific agent.
Args:
agent_id (str): The ID of the agent to retrieve.
version (Optional[int]): Specific version of the agent (default: None).
Returns:
market.model.AgentDetailResponse: The response containing the agent details.
Raises:
HTTPException: If the agent is not found or an unexpected error occurs.
"""
try:
agent = await market.db.get_agent_details(agent_id, version)
background_tasks.add_task(market.utils.analytics.track_view, agent_id)
return market.model.AgentDetailResponse(**agent.model_dump())
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=404, detail=str(e))
except Exception as e:
raise fastapi.HTTPException(
status_code=500, detail=f"An unexpected error occurred: {str(e)}"
)
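The details endpoint accepts an optional version query parameter and schedules view tracking as a FastAPI background task, so the analytics write happens after the response is sent. A hedged sketch follows; the base URL and agent ID are placeholders.

import httpx

BASE_URL = "http://localhost:8015/api/v1/market"  # assumed mount point
agent_id = "aaa-bbb-ccc"                          # placeholder ID

# Fetch a specific version; omitting "version" returns the latest one.
resp = httpx.get(f"{BASE_URL}/agents/{agent_id}", params={"version": 1})
resp.raise_for_status()
detail = resp.json()  # AgentDetailResponse; the view counter is incremented asynchronously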
@router.get("/agents/{agent_id}/download")
async def download_agent(
background_tasks: fastapi.BackgroundTasks,
agent_id: str = fastapi.Path(..., description="The ID of the agent to retrieve"),
version: typing.Optional[int] = fastapi.Query(
None, description="Specific version of the agent"
),
):
"""
Download details of a specific agent.
    NOTE: This returns the same data as the agent details endpoint, but it also
    triggers "download" tracking. No file is actually downloaded by this route.
Args:
agent_id (str): The ID of the agent to retrieve.
version (Optional[int]): Specific version of the agent (default: None).
Returns:
market.model.AgentDetailResponse: The response containing the agent details.
Raises:
HTTPException: If the agent is not found or an unexpected error occurs.
"""
try:
agent = await market.db.get_agent_details(agent_id, version)
background_tasks.add_task(market.utils.analytics.track_download, agent_id)
return market.model.AgentDetailResponse(**agent.model_dump())
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=404, detail=str(e))
except Exception as e:
raise fastapi.HTTPException(
status_code=500, detail=f"An unexpected error occurred: {str(e)}"
)
@router.get("/agents/{agent_id}/download-file")
async def download_agent_file(
background_tasks: fastapi.BackgroundTasks,
agent_id: str = fastapi.Path(..., description="The ID of the agent to download"),
version: typing.Optional[int] = fastapi.Query(
None, description="Specific version of the agent"
),
) -> fastapi.responses.FileResponse:
"""
Download the agent file by streaming its content.
Args:
agent_id (str): The ID of the agent to download.
version (Optional[int]): Specific version of the agent to download.
Returns:
        fastapi.responses.FileResponse: A JSON file response containing the agent's graph data.
Raises:
HTTPException: If the agent is not found or an unexpected error occurs.
"""
agent = await market.db.get_agent_details(agent_id, version)
graph_data: prisma.Json = agent.graph
background_tasks.add_task(market.utils.analytics.track_download, agent_id)
file_name = f"agent_{agent_id}_v{version or 'latest'}.json"
with tempfile.NamedTemporaryFile(
mode="w", suffix=".json", delete=False
) as tmp_file:
tmp_file.write(json.dumps(graph_data))
tmp_file.flush()
return fastapi.responses.FileResponse(
tmp_file.name, filename=file_name, media_type="application/json"
)
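A sketch of consuming the file-download route: the server writes the graph JSON to a temporary file and returns it as a FileResponse, so the client just receives a JSON document. The base URL and agent ID are placeholders. Note that the server side opens the temporary file with delete=False, so the file persists on the server after the response is sent.

import json
import httpx

BASE_URL = "http://localhost:8015/api/v1/market"  # assumed mount point
agent_id = "aaa-bbb-ccc"                          # placeholder ID

resp = httpx.get(f"{BASE_URL}/agents/{agent_id}/download-file")
resp.raise_for_status()
graph = json.loads(resp.text)  # the agent's graph data
with open(f"agent_{agent_id}.json", "w") as f:
    json.dump(graph, f, indent=2)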
# top agents by downloads
@router.get(
"/top-downloads/agents",
response_model=market.model.ListResponse[market.model.AgentResponse],
)
async def top_agents_by_downloads(
page: int = fastapi.Query(1, ge=1, description="Page number"),
page_size: int = fastapi.Query(
10, ge=1, le=100, description="Number of items per page"
),
submission_status: prisma.enums.SubmissionStatus = fastapi.Query(
default=prisma.enums.SubmissionStatus.APPROVED,
description="Filter by submission status",
),
) -> market.model.ListResponse[market.model.AgentResponse]:
"""
Retrieve a list of top agents based on the number of downloads.
Args:
page (int): Page number (default: 1).
page_size (int): Number of items per page (default: 10, min: 1, max: 100).
submission_status (str): Filter by submission status (default: "APPROVED").
Returns:
market.model.ListResponse[market.model.AgentResponse]: A response containing the list of top agents and pagination information.
Raises:
HTTPException: If there is a client error (status code 400) or an unexpected error (status code 500).
"""
try:
result = await market.db.get_top_agents_by_downloads(
page=page,
page_size=page_size,
submission_status=submission_status,
)
ret = market.model.ListResponse(
total_count=result.total_count,
page=result.page,
page_size=result.page_size,
total_pages=result.total_pages,
items=[
market.model.AgentResponse(
id=item.agent.id,
name=item.agent.name,
description=item.agent.description,
author=item.agent.author,
keywords=item.agent.keywords,
categories=item.agent.categories,
version=item.agent.version,
createdAt=item.agent.createdAt,
updatedAt=item.agent.updatedAt,
views=item.views,
downloads=item.downloads,
submissionStatus=item.agent.submissionStatus,
)
for item in result.items
if item.agent is not None
],
)
return ret
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=400, detail=str(e)) from e
except Exception as e:
raise fastapi.HTTPException(
status_code=500, detail=f"An unexpected error occurred: {e}"
) from e
@router.get(
"/featured/agents",
response_model=market.model.ListResponse[market.model.AgentResponse],
)
async def get_featured_agents(
category: str = fastapi.Query(
"featured", description="Category of featured agents"
),
page: int = fastapi.Query(1, ge=1, description="Page number"),
page_size: int = fastapi.Query(
10, ge=1, le=100, description="Number of items per page"
),
submission_status: prisma.enums.SubmissionStatus = fastapi.Query(
default=prisma.enums.SubmissionStatus.APPROVED,
description="Filter by submission status",
),
):
"""
Retrieve a list of featured agents based on the provided category.
Args:
category (str): Category of featured agents (default: "featured").
page (int): Page number (default: 1).
page_size (int): Number of items per page (default: 10, min: 1, max: 100).
submission_status (str): Filter by submission status (default: "APPROVED").
Returns:
market.model.ListResponse[market.model.AgentResponse]: A response containing the list of featured agents and pagination information.
Raises:
HTTPException: If there is a client error (status code 400) or an unexpected error (status code 500).
"""
try:
result = await market.db.get_featured_agents(
category=category,
page=page,
page_size=page_size,
submission_status=submission_status,
)
ret = market.model.ListResponse(
total_count=result.total_count,
page=result.page,
page_size=result.page_size,
total_pages=result.total_pages,
items=[
market.model.AgentResponse(
id=item.agent.id,
name=item.agent.name,
description=item.agent.description,
author=item.agent.author,
keywords=item.agent.keywords,
categories=item.agent.categories,
version=item.agent.version,
createdAt=item.agent.createdAt,
updatedAt=item.agent.updatedAt,
views=(
item.agent.AnalyticsTracker[0].views
if item.agent.AnalyticsTracker
and len(item.agent.AnalyticsTracker) > 0
else 0
),
downloads=(
item.agent.AnalyticsTracker[0].downloads
if item.agent.AnalyticsTracker
and len(item.agent.AnalyticsTracker) > 0
else 0
),
submissionStatus=item.agent.submissionStatus,
)
for item in result.featured_agents
if item.agent is not None
],
)
return ret
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=400, detail=str(e)) from e
except Exception as e:
raise fastapi.HTTPException(
status_code=500, detail=f"An unexpected error occurred: {e}"
) from e

View File

@ -1,26 +0,0 @@
import fastapi
import market.db
import market.model
router = fastapi.APIRouter()
@router.post("/agent-installed")
async def agent_installed_endpoint(
event_data: market.model.AgentInstalledFromMarketplaceEventData,
):
"""
Endpoint to track agent installation events from the marketplace.
Args:
event_data (market.model.AgentInstalledFromMarketplaceEventData): The event data.
"""
try:
await market.db.create_agent_installed_event(event_data)
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=400, detail=str(e))
except Exception as e:
raise fastapi.HTTPException(
status_code=500, detail=f"An unexpected error occurred: {e}"
)
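A hedged sketch of reporting an installation event. The payload field names below are assumptions inferred from the InstallTracker table further down in this diff (marketplaceAgentId, installedAgentId, installationLocation); the actual AgentInstalledFromMarketplaceEventData model is not shown here, and the mount point is likewise assumed.

import httpx

BASE_URL = "http://localhost:8015/api/v1/market/analytics"  # assumed mount point

resp = httpx.post(
    f"{BASE_URL}/agent-installed",
    json={
        # Field names are assumptions, not confirmed by this diff.
        "marketplace_agent_id": "aaa-bbb-ccc",
        "installed_agent_id": "ddd-eee-fff",
        "installation_location": "LOCAL",  # or "CLOUD", per the InstallationLocation enum
    },
)
resp.raise_for_status()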

View File

@ -1,56 +0,0 @@
import typing
import fastapi
import prisma.enums
import market.db
import market.model
import market.utils.extension_types
router = fastapi.APIRouter()
@router.get("/search")
async def search(
query: str,
page: int = fastapi.Query(1, description="The pagination page to start on"),
page_size: int = fastapi.Query(
10, description="The number of items to return per page"
),
categories: typing.List[str] = fastapi.Query(
None, description="The categories to filter by"
),
description_threshold: int = fastapi.Query(
60, description="The number of characters to return from the description"
),
sort_by: str = fastapi.Query("rank", description="Sorting by column"),
sort_order: typing.Literal["desc", "asc"] = fastapi.Query(
"desc", description="The sort order based on sort_by"
),
submission_status: prisma.enums.SubmissionStatus = fastapi.Query(
prisma.enums.SubmissionStatus.APPROVED,
description="The submission status to filter by",
),
) -> market.model.ListResponse[market.utils.extension_types.AgentsWithRank]:
"""searches endpoint for agents
Args:
query (str): the search query
page (int, optional): the pagination page to start on. Defaults to 1.
page_size (int, optional): the number of items to return per page. Defaults to 10.
category (str | None, optional): the agent category to filter by. None is no filter. Defaults to None.
description_threshold (int, optional): the number of characters to return from the description. Defaults to 60.
sort_by (str, optional): Sorting by column. Defaults to "rank".
sort_order ('asc' | 'desc', optional): the sort order based on sort_by. Defaults to "desc".
"""
agents = await market.db.search_db(
query=query,
page=page,
page_size=page_size,
categories=categories,
description_threshold=description_threshold,
sort_by=sort_by,
sort_order=sort_order,
submission_status=submission_status,
)
return agents
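A sketch of exercising the search route. Repeated categories query parameters map onto the List[str] parameter, and the base URL is an assumption for illustration.

import httpx

BASE_URL = "http://localhost:8015/api/v1/market"  # assumed mount point

resp = httpx.get(
    f"{BASE_URL}/search",
    params={
        "query": "email assistant",
        "page": 1,
        "page_size": 10,
        "categories": ["productivity", "communication"],  # sent as repeated params
        "sort_by": "rank",
        "sort_order": "desc",
    },
)
resp.raise_for_status()
results = resp.json()  # ListResponse[AgentsWithRank]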

View File

@ -1,35 +0,0 @@
import autogpt_libs.auth
import fastapi
import fastapi.responses
import prisma
import market.db
import market.model
import market.utils.analytics
router = fastapi.APIRouter()
@router.post("/agents/submit", response_model=market.model.AgentResponse)
async def submit_agent(
request: market.model.AddAgentRequest,
user: autogpt_libs.auth.User = fastapi.Depends(autogpt_libs.auth.requires_user),
):
"""
A basic endpoint to create a new agent entry in the database.
"""
try:
agent = await market.db.create_agent_entry(
request.graph["name"],
request.graph["description"],
request.author,
request.keywords,
request.categories,
prisma.Json(request.graph),
)
return fastapi.responses.PlainTextResponse(agent.model_dump_json())
except market.db.AgentQueryError as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
except Exception as e:
raise fastapi.HTTPException(status_code=500, detail=str(e))
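A sketch of submitting an agent. The payload shape mirrors what create_agent_entry reads from the request above (graph["name"], graph["description"], author, keywords, categories); the bearer token and base URL are placeholders, since the endpoint requires an authenticated user.

import httpx

BASE_URL = "http://localhost:8015/api/v1/market"  # assumed mount point

resp = httpx.post(
    f"{BASE_URL}/agents/submit",
    headers={"Authorization": "Bearer <token>"},  # placeholder token
    json={
        "graph": {"name": "My Agent", "description": "Does useful things"},
        "author": "example-author",
        "keywords": ["email", "automation"],
        "categories": ["productivity"],
    },
)
resp.raise_for_status()
submitted = resp.json()  # the created agent entry, initially in PENDING review status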

View File

@ -1,47 +0,0 @@
import prisma.models
async def track_download(agent_id: str):
"""
Track the download event in the database.
Args:
agent_id (str): The ID of the agent.
Raises:
Exception: If there is an error tracking the download event.
"""
try:
await prisma.models.AnalyticsTracker.prisma().upsert(
where={"agentId": agent_id},
data={
"update": {"downloads": {"increment": 1}},
"create": {"agentId": agent_id, "downloads": 1, "views": 0},
},
)
except Exception as e:
raise Exception(f"Error tracking download event: {str(e)}")
async def track_view(agent_id: str):
"""
Track the view event in the database.
Args:
agent_id (str): The ID of the agent.
Raises:
Exception: If there is an error tracking the view event.
"""
try:
await prisma.models.AnalyticsTracker.prisma().upsert(
where={"agentId": agent_id},
data={
"update": {"views": {"increment": 1}},
"create": {"agentId": agent_id, "downloads": 0, "views": 1},
},
)
except Exception as e:
raise Exception(f"Error tracking view event: {str(e)}")

View File

@ -1,5 +0,0 @@
import prisma.models
class AgentsWithRank(prisma.models.Agents):
rank: float

View File

@ -1,6 +0,0 @@
import prisma.models
prisma.models.Agents.create_partial(
"AgentOnlyDescriptionNameAuthorIdCategories",
include={"name", "author", "id", "categories"},
)

View File

@ -1,61 +0,0 @@
-- CreateTable
CREATE TABLE "Agents" (
"id" UUID NOT NULL DEFAULT gen_random_uuid(),
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
"version" INTEGER NOT NULL DEFAULT 1,
"name" TEXT,
"description" TEXT,
"author" TEXT,
"keywords" TEXT[],
"categories" TEXT[],
"search" tsvector DEFAULT ''::tsvector,
"graph" JSONB NOT NULL,
CONSTRAINT "Agents_pkey" PRIMARY KEY ("id","version")
);
-- CreateTable
CREATE TABLE "AnalyticsTracker" (
"id" UUID NOT NULL DEFAULT gen_random_uuid(),
"agentId" UUID NOT NULL,
"views" INTEGER NOT NULL,
"downloads" INTEGER NOT NULL
);
-- CreateIndex
CREATE UNIQUE INDEX "Agents_id_key" ON "Agents"("id");
-- CreateIndex
CREATE UNIQUE INDEX "AnalyticsTracker_id_key" ON "AnalyticsTracker"("id");
-- AddForeignKey
ALTER TABLE "AnalyticsTracker" ADD CONSTRAINT "AnalyticsTracker_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agents"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- Add trigger to update the search column with the tsvector of the agent
-- Function to be invoked by trigger
CREATE OR REPLACE FUNCTION update_tsvector_column() RETURNS TRIGGER AS $$
BEGIN
NEW.search := to_tsvector('english', COALESCE(NEW.description, '')|| ' ' ||COALESCE(NEW.name, '')|| ' ' ||COALESCE(NEW.author, ''));
RETURN NEW;
END;
$$ LANGUAGE plpgsql SECURITY definer SET search_path = public, pg_temp;
-- Trigger that keeps the TSVECTOR up to date
DROP TRIGGER IF EXISTS "update_tsvector" ON "Agents";
CREATE TRIGGER "update_tsvector"
BEFORE INSERT OR UPDATE ON "Agents"
FOR EACH ROW
EXECUTE FUNCTION update_tsvector_column ();
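The trigger above keeps the search tsvector in sync with name, description, and author, which is what enables Postgres full-text ranking over agents. A hedged sketch of querying it through the Python Prisma client's raw-query interface (query_raw with $1-style placeholders is an assumption about the client in use; the SQL itself follows the schema in this migration):

import asyncio
import prisma

async def search_agents(term: str):
    db = prisma.Prisma()
    await db.connect()
    # Rank agents whose search vector matches the plain-text query.
    rows = await db.query_raw(
        'SELECT "id", "name", ts_rank("search", plainto_tsquery(\'english\', $1)) AS rank '
        'FROM "Agents" WHERE "search" @@ plainto_tsquery(\'english\', $1) '
        'ORDER BY rank DESC LIMIT 10',
        term,
    )
    await db.disconnect()
    return rows

print(asyncio.run(search_agents("email assistant")))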

View File

@ -1,11 +0,0 @@
/*
Warnings:
- A unique constraint covering the columns `[agentId]` on the table `AnalyticsTracker` will be added. If there are existing duplicate values, this will fail.
*/
-- AlterTable
ALTER TABLE "AnalyticsTracker" ADD CONSTRAINT "AnalyticsTracker_pkey" PRIMARY KEY ("id");
-- CreateIndex
CREATE UNIQUE INDEX "AnalyticsTracker_agentId_key" ON "AnalyticsTracker"("agentId");

View File

@ -1,20 +0,0 @@
-- CreateTable
CREATE TABLE "FeaturedAgent" (
"id" UUID NOT NULL DEFAULT gen_random_uuid(),
"agentId" UUID NOT NULL,
"is_featured" BOOLEAN NOT NULL,
"category" TEXT NOT NULL DEFAULT 'featured',
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
CONSTRAINT "FeaturedAgent_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "FeaturedAgent_id_key" ON "FeaturedAgent"("id");
-- CreateIndex
CREATE UNIQUE INDEX "FeaturedAgent_agentId_key" ON "FeaturedAgent"("agentId");
-- AddForeignKey
ALTER TABLE "FeaturedAgent" ADD CONSTRAINT "FeaturedAgent_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agents"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

View File

@ -1,2 +0,0 @@
-- AlterTable
ALTER TABLE "FeaturedAgent" ALTER COLUMN "is_featured" SET DEFAULT false;

View File

@ -1,8 +0,0 @@
-- CreateEnum
CREATE TYPE "SubmissionStatus" AS ENUM ('PENDING', 'APPROVED', 'REJECTED');
-- AlterTable
ALTER TABLE "Agents" ADD COLUMN "submissionDate" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
ADD COLUMN "submissionReviewComments" TEXT,
ADD COLUMN "submissionReviewDate" TIMESTAMP(3),
ADD COLUMN "submissionStatus" "SubmissionStatus" NOT NULL DEFAULT 'PENDING';

View File

@ -1,12 +0,0 @@
/*
Warnings:
- You are about to drop the column `category` on the `FeaturedAgent` table. All the data in the column will be lost.
- You are about to drop the column `is_featured` on the `FeaturedAgent` table. All the data in the column will be lost.
*/
-- AlterTable
ALTER TABLE "FeaturedAgent" DROP COLUMN "category",
DROP COLUMN "is_featured",
ADD COLUMN "featuredCategories" TEXT[],
ADD COLUMN "isActive" BOOLEAN NOT NULL DEFAULT false;

View File

@ -1,19 +0,0 @@
-- CreateEnum
CREATE TYPE "InstallationLocation" AS ENUM ('LOCAL', 'CLOUD');
-- CreateTable
CREATE TABLE "InstallTracker" (
"id" UUID NOT NULL DEFAULT gen_random_uuid(),
"marketplaceAgentId" UUID NOT NULL,
"installedAgentId" UUID NOT NULL,
"installationLocation" "InstallationLocation" NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "InstallTracker_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "InstallTracker_marketplaceAgentId_installedAgentId_key" ON "InstallTracker"("marketplaceAgentId", "installedAgentId");
-- AddForeignKey
ALTER TABLE "InstallTracker" ADD CONSTRAINT "InstallTracker_marketplaceAgentId_fkey" FOREIGN KEY ("marketplaceAgentId") REFERENCES "Agents"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

Some files were not shown because too many files have changed in this diff.